input
stringlengths
53
297k
output
stringclasses
604 values
repo_name
stringclasses
376 values
test_path
stringclasses
583 values
code_path
stringlengths
7
116
"""Homematic base entity.""" from abc import abstractmethod from datetime import timedelta import logging from homeassistant.const import ATTR_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .const import ( ATTR_ADDRESS, ATTR_CHANNEL, ATTR_INTERFACE, ATTR_PARAM, ATTR_UNIQUE_ID, DATA_HOMEMATIC, DOMAIN, HM_ATTRIBUTE_SUPPORT, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL_HUB = timedelta(seconds=300) SCAN_INTERVAL_VARIABLES = timedelta(seconds=30) class HMDevice(Entity): """The HomeMatic device base object.""" def __init__(self, config): """Initialize a generic HomeMatic device.""" self._name = config.get(ATTR_NAME) self._address = config.get(ATTR_ADDRESS) self._interface = config.get(ATTR_INTERFACE) self._channel = config.get(ATTR_CHANNEL) self._state = config.get(ATTR_PARAM) self._unique_id = config.get(ATTR_UNIQUE_ID) self._data = {} self._homematic = None self._hmdevice = None self._connected = False self._available = False self._channel_map = set() # Set parameter to uppercase if self._state: self._state = self._state.upper() async def async_added_to_hass(self): """Load data init callbacks.""" self._subscribe_homematic_events() @property def unique_id(self): """Return unique ID. HomeMatic entity IDs are unique by default.""" return self._unique_id.replace(" ", "_") @property def should_poll(self): """Return false. 
HomeMatic states are pushed by the XML-RPC Server.""" return False @property def name(self): """Return the name of the device.""" return self._name @property def available(self): """Return true if device is available.""" return self._available @property def device_state_attributes(self): """Return device specific state attributes.""" # Static attributes attr = { "id": self._hmdevice.ADDRESS, "interface": self._interface, } # Generate a dictionary with attributes for node, data in HM_ATTRIBUTE_SUPPORT.items(): # Is an attribute and exists for this object if node in self._data: value = data[1].get(self._data[node], self._data[node]) attr[data[0]] = value return attr def update(self): """Connect to HomeMatic init values.""" if self._connected: return True # Initialize self._homematic = self.hass.data[DATA_HOMEMATIC] self._hmdevice = self._homematic.devices[self._interface][self._address] self._connected = True try: # Initialize datapoints of this object self._init_data() self._load_data_from_hm() # Link events from pyhomematic self._available = not self._hmdevice.UNREACH except Exception as err: # pylint: disable=broad-except self._connected = False _LOGGER.error("Exception while linking %s: %s", self._address, str(err)) def _hm_event_callback(self, device, caller, attribute, value): """Handle all pyhomematic device events.""" has_changed = False # Is data needed for this instance? 
if f"{attribute}:{device.partition(':')[2]}" in self._channel_map: self._data[attribute] = value has_changed = True # Availability has changed if self.available != (not self._hmdevice.UNREACH): self._available = not self._hmdevice.UNREACH has_changed = True # If it has changed data point, update Home Assistant if has_changed: self.schedule_update_ha_state() def _subscribe_homematic_events(self): """Subscribe all required events to handle job.""" for metadata in ( self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE, self._hmdevice.ATTRIBUTENODE, self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE, self._hmdevice.ACTIONNODE, ): for node, channels in metadata.items(): # Data is needed for this instance if node in self._data: # chan is current channel if len(channels) == 1: channel = channels[0] else: channel = self._channel # Remember the channel for this attribute to ignore invalid events later self._channel_map.add(f"{node}:{channel!s}") # Set callbacks self._hmdevice.setEventCallback(callback=self._hm_event_callback, bequeath=True) def _load_data_from_hm(self): """Load first value from pyhomematic.""" if not self._connected: return False # Read data from pyhomematic for metadata, funct in ( (self._hmdevice.ATTRIBUTENODE, self._hmdevice.getAttributeData), (self._hmdevice.WRITENODE, self._hmdevice.getWriteData), (self._hmdevice.SENSORNODE, self._hmdevice.getSensorData), (self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData), ): for node in metadata: if metadata[node] and node in self._data: self._data[node] = funct(name=node, channel=self._channel) return True def _hm_set_state(self, value): """Set data to main datapoint.""" if self._state in self._data: self._data[self._state] = value def _hm_get_state(self): """Get data from main datapoint.""" if self._state in self._data: return self._data[self._state] return None def _init_data(self): """Generate a data dict (self._data) from the HomeMatic metadata.""" # Add all attributes to data dictionary for data_note in 
self._hmdevice.ATTRIBUTENODE: self._data.update({data_note: None}) # Initialize device specific data self._init_data_struct() @abstractmethod def _init_data_struct(self): """Generate a data dictionary from the HomeMatic device metadata.""" class HMHub(Entity): """The HomeMatic hub. (CCU2/HomeGear).""" def __init__(self, hass, homematic, name): """Initialize HomeMatic hub.""" self.hass = hass self.entity_id = f"{DOMAIN}.{name.lower()}" self._homematic = homematic self._variables = {} self._name = name self._state = None # Load data self.hass.helpers.event.track_time_interval(self._update_hub, SCAN_INTERVAL_HUB) self.hass.add_job(self._update_hub, None) self.hass.helpers.event.track_time_interval( self._update_variables, SCAN_INTERVAL_VARIABLES ) self.hass.add_job(self._update_variables, None) @property def name(self): """Return the name of the device.""" return self._name @property def should_poll(self): """Return false. HomeMatic Hub object updates variables.""" return False @property def state(self): """Return the state of the entity.""" return self._state @property def state_attributes(self): """Return the state attributes.""" return self._variables.copy() @property def icon(self): """Return the icon to use in the frontend, if any.""" return "mdi:gradient" def _update_hub(self, now): """Retrieve latest state.""" service_message = self._homematic.getServiceMessages(self._name) state = None if service_message is None else len(service_message) # state have change? 
if self._state != state: self._state = state self.schedule_update_ha_state() def _update_variables(self, now): """Retrieve all variable data and update hmvariable states.""" variables = self._homematic.getAllSystemVariables(self._name) if variables is None: return state_change = False for key, value in variables.items(): if key in self._variables and value == self._variables[key]: continue state_change = True self._variables.update({key: value}) if state_change: self.schedule_update_ha_state() def hm_set_variable(self, name, value): """Set variable value on CCU/Homegear.""" if name not in self._variables: _LOGGER.error("Variable %s not found on %s", name, self.name) return old_value = self._variables.get(name) if isinstance(old_value, bool): value = cv.boolean(value) else: value = float(value) self._homematic.setSystemVariable(self.name, name, value) self._variables.update({name: value}) self.schedule_update_ha_state()
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/homematic/entity.py
"""Support for One-Time Password (OTP).""" import time import pyotp import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, CONF_TOKEN from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity DEFAULT_NAME = "OTP Sensor" TIME_STEP = 30 # Default time step assumed by Google Authenticator ICON = "mdi:update" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the OTP sensor.""" name = config.get(CONF_NAME) token = config.get(CONF_TOKEN) async_add_entities([TOTPSensor(name, token)], True) return True # Only TOTP supported at the moment, HOTP might be added later class TOTPSensor(Entity): """Representation of a TOTP sensor.""" def __init__(self, name, token): """Initialize the sensor.""" self._name = name self._otp = pyotp.TOTP(token) self._state = None self._next_expiration = None async def async_added_to_hass(self): """Handle when an entity is about to be added to Home Assistant.""" self._call_loop() @callback def _call_loop(self): self._state = self._otp.now() self.async_write_ha_state() # Update must occur at even TIME_STEP, e.g. 12:00:00, 12:00:30, # 12:01:00, etc. in order to have synced time (see RFC6238) self._next_expiration = TIME_STEP - (time.time() % TIME_STEP) self.hass.loop.call_later(self._next_expiration, self._call_loop) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def should_poll(self): """No polling needed.""" return False @property def icon(self): """Return the icon to use in the frontend.""" return ICON
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
# Repo: sdague/home-assistant
# Test path: tests/components/ps4/test_config_flow.py
# Code path: homeassistant/components/otp/sensor.py
"""Support for Dyson Pure Cool link fan."""
import logging

from libpurecool.const import FanMode, FanSpeed, NightMode, Oscillation
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from libpurecool.dyson_pure_state import DysonPureCoolState
from libpurecool.dyson_pure_state_v2 import DysonPureCoolV2State
import voluptuous as vol

from homeassistant.components.fan import (
    SPEED_HIGH,
    SPEED_LOW,
    SPEED_MEDIUM,
    SUPPORT_OSCILLATE,
    SUPPORT_SET_SPEED,
    FanEntity,
)
from homeassistant.const import ATTR_ENTITY_ID
import homeassistant.helpers.config_validation as cv

from . import DYSON_DEVICES

_LOGGER = logging.getLogger(__name__)

# Keys used both as extra state attributes and as service-call data fields.
ATTR_NIGHT_MODE = "night_mode"
ATTR_AUTO_MODE = "auto_mode"
ATTR_ANGLE_LOW = "angle_low"
ATTR_ANGLE_HIGH = "angle_high"
ATTR_FLOW_DIRECTION_FRONT = "flow_direction_front"
ATTR_TIMER = "timer"
ATTR_HEPA_FILTER = "hepa_filter"
ATTR_CARBON_FILTER = "carbon_filter"
ATTR_DYSON_SPEED = "dyson_speed"
ATTR_DYSON_SPEED_LIST = "dyson_speed_list"

DYSON_DOMAIN = "dyson"
DYSON_FAN_DEVICES = "dyson_fan_devices"

SERVICE_SET_NIGHT_MODE = "set_night_mode"
SERVICE_SET_AUTO_MODE = "set_auto_mode"
SERVICE_SET_ANGLE = "set_angle"
SERVICE_SET_FLOW_DIRECTION_FRONT = "set_flow_direction_front"
SERVICE_SET_TIMER = "set_timer"
SERVICE_SET_DYSON_SPEED = "set_speed"

# Voluptuous schemas validating the data of each custom service call.
DYSON_SET_NIGHT_MODE_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_NIGHT_MODE): cv.boolean,
    }
)

SET_AUTO_MODE_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_AUTO_MODE): cv.boolean,
    }
)

SET_ANGLE_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_ANGLE_LOW): cv.positive_int,
        vol.Required(ATTR_ANGLE_HIGH): cv.positive_int,
    }
)

SET_FLOW_DIRECTION_FRONT_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_FLOW_DIRECTION_FRONT): cv.boolean,
    }
)

SET_TIMER_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_TIMER): cv.positive_int,
    }
)

SET_DYSON_SPEED_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_DYSON_SPEED): cv.positive_int,
    }
)


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Dyson fan components."""
    if discovery_info is None:
        return

    _LOGGER.debug("Creating new Dyson fans")
    if DYSON_FAN_DEVICES not in hass.data:
        hass.data[DYSON_FAN_DEVICES] = []

    # Wrap every not-yet-registered device from the parent dyson component.
    has_purecool_devices = False
    known_serials = [entity.serial for entity in hass.data[DYSON_FAN_DEVICES]]
    for device in hass.data[DYSON_DEVICES]:
        if device.serial not in known_serials:
            if isinstance(device, DysonPureCool):
                has_purecool_devices = True
                hass.data[DYSON_FAN_DEVICES].append(DysonPureCoolDevice(device))
            elif isinstance(device, DysonPureCoolLink):
                hass.data[DYSON_FAN_DEVICES].append(
                    DysonPureCoolLinkDevice(hass, device)
                )

    add_entities(hass.data[DYSON_FAN_DEVICES])

    def service_handle(service):
        """Handle the Dyson services."""
        entity_id = service.data[ATTR_ENTITY_ID]
        fan_device = next(
            (
                fan
                for fan in hass.data[DYSON_FAN_DEVICES]
                if fan.entity_id == entity_id
            ),
            None,
        )
        if fan_device is None:
            _LOGGER.warning("Unable to find Dyson fan device %s", str(entity_id))
            return

        # Dispatch on the service name; each branch forwards validated data.
        if service.service == SERVICE_SET_NIGHT_MODE:
            fan_device.set_night_mode(service.data[ATTR_NIGHT_MODE])
        if service.service == SERVICE_SET_AUTO_MODE:
            fan_device.set_auto_mode(service.data[ATTR_AUTO_MODE])
        if service.service == SERVICE_SET_ANGLE:
            fan_device.set_angle(
                service.data[ATTR_ANGLE_LOW], service.data[ATTR_ANGLE_HIGH]
            )
        if service.service == SERVICE_SET_FLOW_DIRECTION_FRONT:
            fan_device.set_flow_direction_front(
                service.data[ATTR_FLOW_DIRECTION_FRONT]
            )
        if service.service == SERVICE_SET_TIMER:
            fan_device.set_timer(service.data[ATTR_TIMER])
        if service.service == SERVICE_SET_DYSON_SPEED:
            fan_device.set_dyson_speed(service.data[ATTR_DYSON_SPEED])

    # Register dyson service(s).
    hass.services.register(
        DYSON_DOMAIN,
        SERVICE_SET_NIGHT_MODE,
        service_handle,
        schema=DYSON_SET_NIGHT_MODE_SCHEMA,
    )
    hass.services.register(
        DYSON_DOMAIN,
        SERVICE_SET_AUTO_MODE,
        service_handle,
        schema=SET_AUTO_MODE_SCHEMA,
    )
    # The remaining services only exist for PureCool (TP04/DP04) models.
    if has_purecool_devices:
        hass.services.register(
            DYSON_DOMAIN, SERVICE_SET_ANGLE, service_handle, schema=SET_ANGLE_SCHEMA
        )
        hass.services.register(
            DYSON_DOMAIN,
            SERVICE_SET_FLOW_DIRECTION_FRONT,
            service_handle,
            schema=SET_FLOW_DIRECTION_FRONT_SCHEMA,
        )
        hass.services.register(
            DYSON_DOMAIN, SERVICE_SET_TIMER, service_handle, schema=SET_TIMER_SCHEMA
        )
        hass.services.register(
            DYSON_DOMAIN,
            SERVICE_SET_DYSON_SPEED,
            service_handle,
            schema=SET_DYSON_SPEED_SCHEMA,
        )


class DysonPureCoolLinkDevice(FanEntity):
    """Representation of a Dyson fan."""

    def __init__(self, hass, device):
        """Initialize the fan."""
        _LOGGER.debug("Creating device %s", device.name)
        self.hass = hass
        self._device = device

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        # Push updates arrive through the device's message listener.
        self._device.add_message_listener(self.on_message)

    def on_message(self, message):
        """Call when new messages received from the fan."""
        if isinstance(message, DysonPureCoolState):
            _LOGGER.debug("Message received for fan device %s: %s", self.name, message)
            self.schedule_update_ha_state()

    @property
    def should_poll(self):
        """No polling needed; state is pushed via on_message."""
        return False

    @property
    def name(self):
        """Return the display name of this fan."""
        return self._device.name

    def set_speed(self, speed: str) -> None:
        """Set the speed of the fan. Never called ??."""
        _LOGGER.debug("Set fan speed to: %s", speed)
        if speed == FanSpeed.FAN_SPEED_AUTO.value:
            self._device.set_configuration(fan_mode=FanMode.AUTO)
        else:
            # Device expects a zero-padded four digit speed code.
            target = FanSpeed(f"{int(speed):04d}")
            self._device.set_configuration(fan_mode=FanMode.FAN, fan_speed=target)

    def turn_on(self, speed: str = None, **kwargs) -> None:
        """Turn on the fan."""
        _LOGGER.debug("Turn on fan %s with speed %s", self.name, speed)
        if speed:
            if speed == FanSpeed.FAN_SPEED_AUTO.value:
                self._device.set_configuration(fan_mode=FanMode.AUTO)
            else:
                target = FanSpeed(f"{int(speed):04d}")
                self._device.set_configuration(
                    fan_mode=FanMode.FAN, fan_speed=target
                )
        else:
            # No speed requested: just switch the fan on.
            self._device.set_configuration(fan_mode=FanMode.FAN)

    def turn_off(self, **kwargs) -> None:
        """Turn off the fan."""
        _LOGGER.debug("Turn off fan %s", self.name)
        self._device.set_configuration(fan_mode=FanMode.OFF)

    def oscillate(self, oscillating: bool) -> None:
        """Turn on/off oscillating."""
        _LOGGER.debug("Turn oscillation %s for device %s", oscillating, self.name)
        if oscillating:
            self._device.set_configuration(oscillation=Oscillation.OSCILLATION_ON)
        else:
            self._device.set_configuration(oscillation=Oscillation.OSCILLATION_OFF)

    @property
    def oscillating(self):
        """Return the oscillation state."""
        return self._device.state and self._device.state.oscillation == "ON"

    @property
    def is_on(self):
        """Return true if the entity is on."""
        if self._device.state:
            return self._device.state.fan_mode == "FAN"
        return False

    @property
    def speed(self) -> str:
        """Return the current speed."""
        if self._device.state:
            # AUTO is reported verbatim; numeric speeds are returned as int.
            if self._device.state.speed == FanSpeed.FAN_SPEED_AUTO.value:
                return self._device.state.speed
            return int(self._device.state.speed)
        return None

    @property
    def current_direction(self):
        """Return direction of the fan [forward, reverse]."""
        return None

    @property
    def night_mode(self):
        """Return Night mode."""
        return self._device.state.night_mode == "ON"

    def set_night_mode(self, night_mode: bool) -> None:
        """Turn fan in night mode."""
        _LOGGER.debug("Set %s night mode %s", self.name, night_mode)
        if night_mode:
            self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_ON)
        else:
            self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_OFF)

    @property
    def auto_mode(self):
        """Return auto mode."""
        return self._device.state.fan_mode == "AUTO"

    def set_auto_mode(self, auto_mode: bool) -> None:
        """Turn fan in auto mode."""
        _LOGGER.debug("Set %s auto mode %s", self.name, auto_mode)
        if auto_mode:
            self._device.set_configuration(fan_mode=FanMode.AUTO)
        else:
            self._device.set_configuration(fan_mode=FanMode.FAN)

    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return [
            FanSpeed.FAN_SPEED_AUTO.value,
            int(FanSpeed.FAN_SPEED_1.value),
            int(FanSpeed.FAN_SPEED_2.value),
            int(FanSpeed.FAN_SPEED_3.value),
            int(FanSpeed.FAN_SPEED_4.value),
            int(FanSpeed.FAN_SPEED_5.value),
            int(FanSpeed.FAN_SPEED_6.value),
            int(FanSpeed.FAN_SPEED_7.value),
            int(FanSpeed.FAN_SPEED_8.value),
            int(FanSpeed.FAN_SPEED_9.value),
            int(FanSpeed.FAN_SPEED_10.value),
        ]

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED

    @property
    def device_state_attributes(self) -> dict:
        """Return optional state attributes."""
        return {ATTR_NIGHT_MODE: self.night_mode, ATTR_AUTO_MODE: self.auto_mode}


class DysonPureCoolDevice(FanEntity):
    """Representation of a Dyson Purecool (TP04/DP04) fan."""

    def __init__(self, device):
        """Initialize the fan."""
        self._device = device

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        self._device.add_message_listener(self.on_message)

    def on_message(self, message):
        """Call when new messages received from the fan."""
        if isinstance(message, DysonPureCoolV2State):
            _LOGGER.debug("Message received for fan device %s: %s", self.name, message)
            self.schedule_update_ha_state()

    @property
    def should_poll(self):
        """No polling needed; state is pushed via on_message."""
        return False

    @property
    def name(self):
        """Return the display name of this fan."""
        return self._device.name

    def turn_on(self, speed: str = None, **kwargs) -> None:
        """Turn on the fan."""
        _LOGGER.debug("Turn on fan %s", self.name)
        if speed is not None:
            self.set_speed(speed)
        else:
            self._device.turn_on()

    def set_speed(self, speed: str) -> None:
        """Set the speed of the fan."""
        # Map the three HA speed buckets onto representative device speeds.
        if speed == SPEED_LOW:
            self._device.set_fan_speed(FanSpeed.FAN_SPEED_4)
        elif speed == SPEED_MEDIUM:
            self._device.set_fan_speed(FanSpeed.FAN_SPEED_7)
        elif speed == SPEED_HIGH:
            self._device.set_fan_speed(FanSpeed.FAN_SPEED_10)

    def turn_off(self, **kwargs):
        """Turn off the fan."""
        _LOGGER.debug("Turn off fan %s", self.name)
        self._device.turn_off()

    def set_dyson_speed(self, speed: str = None) -> None:
        """Set the exact speed of the purecool fan."""
        _LOGGER.debug("Set exact speed for fan %s", self.name)
        self._device.set_fan_speed(FanSpeed(f"{int(speed):04d}"))

    def oscillate(self, oscillating: bool) -> None:
        """Turn on/off oscillating."""
        _LOGGER.debug("Turn oscillation %s for device %s", oscillating, self.name)
        if oscillating:
            self._device.enable_oscillation()
        else:
            self._device.disable_oscillation()

    def set_night_mode(self, night_mode: bool) -> None:
        """Turn on/off night mode."""
        _LOGGER.debug("Turn night mode %s for device %s", night_mode, self.name)
        if night_mode:
            self._device.enable_night_mode()
        else:
            self._device.disable_night_mode()

    def set_auto_mode(self, auto_mode: bool) -> None:
        """Turn auto mode on/off."""
        _LOGGER.debug("Turn auto mode %s for device %s", auto_mode, self.name)
        if auto_mode:
            self._device.enable_auto_mode()
        else:
            self._device.disable_auto_mode()

    def set_angle(self, angle_low: int, angle_high: int) -> None:
        """Set device angle."""
        _LOGGER.debug(
            "set low %s and high angle %s for device %s",
            angle_low,
            angle_high,
            self.name,
        )
        self._device.enable_oscillation(angle_low, angle_high)

    def set_flow_direction_front(self, flow_direction_front: bool) -> None:
        """Set frontal airflow direction."""
        _LOGGER.debug(
            "Set frontal flow direction to %s for device %s",
            flow_direction_front,
            self.name,
        )
        if flow_direction_front:
            self._device.enable_frontal_direction()
        else:
            self._device.disable_frontal_direction()

    def set_timer(self, timer) -> None:
        """Set timer."""
        _LOGGER.debug("Set timer to %s for device %s", timer, self.name)
        # Zero disables the sleep timer rather than setting it.
        if timer == 0:
            self._device.disable_sleep_timer()
        else:
            self._device.enable_sleep_timer(timer)

    @property
    def oscillating(self):
        """Return the oscillation state."""
        # V2 devices report oscillation as "OION"/"OIOF".
        return self._device.state and self._device.state.oscillation == "OION"

    @property
    def is_on(self):
        """Return true if the entity is on."""
        if self._device.state:
            return self._device.state.fan_power == "ON"

    @property
    def speed(self):
        """Return the current speed."""
        speed_map = {
            FanSpeed.FAN_SPEED_1.value: SPEED_LOW,
            FanSpeed.FAN_SPEED_2.value: SPEED_LOW,
            FanSpeed.FAN_SPEED_3.value: SPEED_LOW,
            FanSpeed.FAN_SPEED_4.value: SPEED_LOW,
            FanSpeed.FAN_SPEED_AUTO.value: SPEED_MEDIUM,
            FanSpeed.FAN_SPEED_5.value: SPEED_MEDIUM,
            FanSpeed.FAN_SPEED_6.value: SPEED_MEDIUM,
            FanSpeed.FAN_SPEED_7.value: SPEED_MEDIUM,
            FanSpeed.FAN_SPEED_8.value: SPEED_HIGH,
            FanSpeed.FAN_SPEED_9.value: SPEED_HIGH,
            FanSpeed.FAN_SPEED_10.value: SPEED_HIGH,
        }
        return speed_map[self._device.state.speed]

    @property
    def dyson_speed(self):
        """Return the current speed."""
        if self._device.state:
            if self._device.state.speed == FanSpeed.FAN_SPEED_AUTO.value:
                return self._device.state.speed
            return int(self._device.state.speed)

    @property
    def night_mode(self):
        """Return Night mode."""
        return self._device.state.night_mode == "ON"

    @property
    def auto_mode(self):
        """Return Auto mode."""
        return self._device.state.auto_mode == "ON"

    @property
    def angle_low(self):
        """Return angle high."""
        return int(self._device.state.oscillation_angle_low)

    @property
    def angle_high(self):
        """Return angle low."""
        return int(self._device.state.oscillation_angle_high)

    @property
    def flow_direction_front(self):
        """Return frontal flow direction."""
        return self._device.state.front_direction == "ON"

    @property
    def timer(self):
        """Return timer."""
        return self._device.state.sleep_timer

    @property
    def hepa_filter(self):
        """Return the HEPA filter state."""
        return int(self._device.state.hepa_filter_state)

    @property
    def carbon_filter(self):
        """Return the carbon filter state."""
        # "INV" means the device has no carbon filter; pass it through as-is.
        if self._device.state.carbon_filter_state == "INV":
            return self._device.state.carbon_filter_state
        return int(self._device.state.carbon_filter_state)

    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]

    @property
    def dyson_speed_list(self) -> list:
        """Get the list of available dyson speeds."""
        return [
            int(FanSpeed.FAN_SPEED_1.value),
            int(FanSpeed.FAN_SPEED_2.value),
            int(FanSpeed.FAN_SPEED_3.value),
            int(FanSpeed.FAN_SPEED_4.value),
            int(FanSpeed.FAN_SPEED_5.value),
            int(FanSpeed.FAN_SPEED_6.value),
            int(FanSpeed.FAN_SPEED_7.value),
            int(FanSpeed.FAN_SPEED_8.value),
            int(FanSpeed.FAN_SPEED_9.value),
            int(FanSpeed.FAN_SPEED_10.value),
        ]

    @property
    def device_serial(self):
        """Return fan's serial number."""
        return self._device.serial

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED

    @property
    def device_state_attributes(self) -> dict:
        """Return optional state attributes."""
        return {
            ATTR_NIGHT_MODE: self.night_mode,
            ATTR_AUTO_MODE: self.auto_mode,
            ATTR_ANGLE_LOW: self.angle_low,
            ATTR_ANGLE_HIGH: self.angle_high,
            ATTR_FLOW_DIRECTION_FRONT: self.flow_direction_front,
            ATTR_TIMER: self.timer,
            ATTR_HEPA_FILTER: self.hepa_filter,
            ATTR_CARBON_FILTER: self.carbon_filter,
            ATTR_DYSON_SPEED: self.dyson_speed,
            ATTR_DYSON_SPEED_LIST: self.dyson_speed_list,
        }
"""Define tests for the PlayStation 4 config flow."""
from pyps4_2ndscreen.errors import CredentialTimeout
import pytest

from homeassistant import data_entry_flow
from homeassistant.components import ps4
from homeassistant.components.ps4.const import (
    DEFAULT_ALIAS,
    DEFAULT_NAME,
    DEFAULT_REGION,
    DOMAIN,
)
from homeassistant.const import (
    CONF_CODE,
    CONF_HOST,
    CONF_IP_ADDRESS,
    CONF_NAME,
    CONF_REGION,
    CONF_TOKEN,
)
from homeassistant.util import location

from tests.async_mock import patch
from tests.common import MockConfigEntry

MOCK_TITLE = "PlayStation 4"
MOCK_CODE = 12345678
MOCK_CODE_LEAD_0 = 1234567
MOCK_CODE_LEAD_0_STR = "01234567"
MOCK_CREDS = "000aa000"
MOCK_HOST = "192.0.0.0"
MOCK_HOST_ADDITIONAL = "192.0.0.1"
MOCK_DEVICE = {
    CONF_HOST: MOCK_HOST,
    CONF_NAME: DEFAULT_NAME,
    CONF_REGION: DEFAULT_REGION,
}
MOCK_DEVICE_ADDITIONAL = {
    CONF_HOST: MOCK_HOST_ADDITIONAL,
    CONF_NAME: DEFAULT_NAME,
    CONF_REGION: DEFAULT_REGION,
}
MOCK_CONFIG = {
    CONF_IP_ADDRESS: MOCK_HOST,
    CONF_NAME: DEFAULT_NAME,
    CONF_REGION: DEFAULT_REGION,
    CONF_CODE: MOCK_CODE,
}
MOCK_CONFIG_ADDITIONAL = {
    CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL,
    CONF_NAME: DEFAULT_NAME,
    CONF_REGION: DEFAULT_REGION,
    CONF_CODE: MOCK_CODE,
}
MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]}
MOCK_UDP_PORT = int(987)
MOCK_TCP_PORT = int(997)

MOCK_AUTO = {"Config Mode": "Auto Discover"}
MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST}

MOCK_LOCATION = location.LocationInfo(
    "0.0.0.0",
    "US",
    "United States",
    "CA",
    "California",
    "San Diego",
    "92122",
    "America/Los_Angeles",
    32.8594,
    -117.2073,
    True,
)


@pytest.fixture(name="location_info", autouse=True)
def location_info_fixture():
    """Mock location info."""
    with patch(
        "homeassistant.components.ps4.config_flow.location.async_detect_location_info",
        return_value=MOCK_LOCATION,
    ):
        yield


@pytest.fixture(name="ps4_setup", autouse=True)
def ps4_setup_fixture():
    """Patch ps4 setup entry."""
    with patch(
        "homeassistant.components.ps4.async_setup_entry",
        return_value=True,
    ):
        yield


async def test_full_flow_implementation(hass):
    """Test registering an implementation and flow works."""
    # Starting the user step lands on the credentials form.
    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "creds"

    # Fetching credentials moves the flow to the mode form.
    with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "mode"

    # Auto-discovery (non-manual mode) proceeds to the link step.
    with patch(
        "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_AUTO
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "link"

    # A successful link creates the config entry.
    with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch(
        "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_CONFIG
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"][CONF_TOKEN] == MOCK_CREDS
    assert result["data"]["devices"] == [MOCK_DEVICE]
    assert result["title"] == MOCK_TITLE


async def test_multiple_flow_implementation(hass):
    """Test multiple device flows."""
    # Starting the user step lands on the credentials form.
    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "creds"

    with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "mode"

    # Two consoles discovered; auto mode proceeds to the link step.
    with patch(
        "pyps4_2ndscreen.Helper.has_devices",
        return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_AUTO
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "link"

    # Linking the first console creates the first entry.
    with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch(
        "pyps4_2ndscreen.Helper.has_devices",
        return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_CONFIG
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"][CONF_TOKEN] == MOCK_CREDS
    assert result["data"]["devices"] == [MOCK_DEVICE]
    assert result["title"] == MOCK_TITLE

    # Exactly one entry now exists, holding a single device config.
    entries = hass.config_entries.async_entries()
    assert len(entries) == 1
    entry_1 = entries[0]
    assert len(entry_1.data["devices"]) == 1

    # Second pass of the flow for the additional console.
    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch(
        "pyps4_2ndscreen.Helper.has_devices",
        return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "creds"

    with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "mode"

    with patch(
        "pyps4_2ndscreen.Helper.has_devices",
        return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_AUTO
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "link"

    # Linking the additional console creates a second, distinct entry.
    with patch(
        "pyps4_2ndscreen.Helper.has_devices",
        return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
    ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"][CONF_TOKEN] == MOCK_CREDS
    assert len(result["data"]["devices"]) == 1
    assert result["title"] == MOCK_TITLE

    entries = hass.config_entries.async_entries()
    assert len(entries) == 2
    entry_2 = entries[-1]
    assert len(entry_2.data["devices"]) == 1
    assert entry_1 is not entry_2


async def test_port_bind_abort(hass):
    """Test that flow aborted when cannot bind to ports 987, 997."""
    # A returned port number means the bind failed; the flow must abort.
    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT):
        reason = "port_987_bind_error"
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == reason

    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT):
        reason = "port_997_bind_error"
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == reason


async def test_duplicate_abort(hass):
    """Test that Flow aborts when found devices already configured."""
    MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass)

    with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "creds"

    with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "mode"

    # Discovering only the already-configured host aborts the flow.
    with patch(
        "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=MOCK_AUTO
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"


async def test_additional_device(hass):
    """Test that Flow can configure another device."""
    # Mock existing entry. (Function continues beyond this span.)
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/dyson/fan.py
"""Typing helpers for ZHA component.""" from typing import TYPE_CHECKING, Callable, TypeVar import zigpy.device import zigpy.endpoint import zigpy.group import zigpy.zcl import zigpy.zdo # pylint: disable=invalid-name CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) ChannelType = "ZigbeeChannel" ChannelsType = "Channels" ChannelPoolType = "ChannelPool" ClientChannelType = "ClientChannel" ZDOChannelType = "ZDOChannel" ZhaDeviceType = "ZHADevice" ZhaEntityType = "ZHAEntity" ZhaGatewayType = "ZHAGateway" ZhaGroupType = "ZHAGroupType" ZigpyClusterType = zigpy.zcl.Cluster ZigpyDeviceType = zigpy.device.Device ZigpyEndpointType = zigpy.endpoint.Endpoint ZigpyGroupType = zigpy.group.Group ZigpyZdoType = zigpy.zdo.ZDO if TYPE_CHECKING: import homeassistant.components.zha.core.channels import homeassistant.components.zha.core.channels as channels import homeassistant.components.zha.core.channels.base as base_channels import homeassistant.components.zha.core.device import homeassistant.components.zha.core.gateway import homeassistant.components.zha.core.group import homeassistant.components.zha.entity ChannelType = base_channels.ZigbeeChannel ChannelsType = channels.Channels ChannelPoolType = channels.ChannelPool ClientChannelType = base_channels.ClientChannel ZDOChannelType = base_channels.ZDOChannel ZhaDeviceType = homeassistant.components.zha.core.device.ZHADevice ZhaEntityType = homeassistant.components.zha.entity.ZhaEntity ZhaGatewayType = homeassistant.components.zha.core.gateway.ZHAGateway ZhaGroupType = homeassistant.components.zha.core.group.ZHAGroup
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/zha/core/typing.py
"""Errors for the Mikrotik component.""" from homeassistant.exceptions import HomeAssistantError class CannotConnect(HomeAssistantError): """Unable to connect to the hub.""" class LoginError(HomeAssistantError): """Component got logged out."""
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/mikrotik/errors.py
"""Support for w800rf32 binary sensors.""" import logging import W800rf32 as w800 import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICES, CONF_NAME from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, event as evt from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util import dt as dt_util from . import W800RF32_DEVICE _LOGGER = logging.getLogger(__name__) CONF_OFF_DELAY = "off_delay" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICES): { cv.string: vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_OFF_DELAY): vol.All( cv.time_period, cv.positive_timedelta ), } ) } }, extra=vol.ALLOW_EXTRA, ) async def async_setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Binary Sensor platform to w800rf32.""" binary_sensors = [] # device_id --> "c1 or a3" X10 device. 
entity (type dictionary) # --> name, device_class etc for device_id, entity in config[CONF_DEVICES].items(): _LOGGER.debug( "Add %s w800rf32.binary_sensor (class %s)", entity[CONF_NAME], entity.get(CONF_DEVICE_CLASS), ) device = W800rf32BinarySensor( device_id, entity.get(CONF_NAME), entity.get(CONF_DEVICE_CLASS), entity.get(CONF_OFF_DELAY), ) binary_sensors.append(device) add_entities(binary_sensors) class W800rf32BinarySensor(BinarySensorEntity): """A representation of a w800rf32 binary sensor.""" def __init__(self, device_id, name, device_class=None, off_delay=None): """Initialize the w800rf32 sensor.""" self._signal = W800RF32_DEVICE.format(device_id) self._name = name self._device_class = device_class self._off_delay = off_delay self._state = False self._delay_listener = None @callback def _off_delay_listener(self, now): """Switch device off after a delay.""" self._delay_listener = None self.update_state(False) @property def name(self): """Return the device name.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def device_class(self): """Return the sensor class.""" return self._device_class @property def is_on(self): """Return true if the sensor state is True.""" return self._state @callback def binary_sensor_update(self, event): """Call for control updates from the w800rf32 gateway.""" if not isinstance(event, w800.W800rf32Event): return dev_id = event.device command = event.command _LOGGER.debug( "BinarySensor update (Device ID: %s Command %s ...)", dev_id, command ) # Update the w800rf32 device state if command in ("On", "Off"): is_on = command == "On" self.update_state(is_on) if self.is_on and self._off_delay is not None and self._delay_listener is None: self._delay_listener = evt.async_track_point_in_time( self.hass, self._off_delay_listener, dt_util.utcnow() + self._off_delay ) def update_state(self, state): """Update the state of the device.""" self._state = state self.async_write_ha_state() async def 
async_added_to_hass(self): """Register update callback.""" async_dispatcher_connect(self.hass, self._signal, self.binary_sensor_update)
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/w800rf32/binary_sensor.py
"""Support for Tado hot water zones.""" import logging import voluptuous as vol from homeassistant.components.water_heater import ( SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE, WaterHeaterEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import ( CONST_HVAC_HEAT, CONST_MODE_AUTO, CONST_MODE_HEAT, CONST_MODE_OFF, CONST_MODE_SMART_SCHEDULE, CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_MODE, CONST_OVERLAY_TIMER, DATA, DOMAIN, SIGNAL_TADO_UPDATE_RECEIVED, TYPE_HOT_WATER, ) from .entity import TadoZoneEntity _LOGGER = logging.getLogger(__name__) MODE_AUTO = "auto" MODE_HEAT = "heat" MODE_OFF = "off" OPERATION_MODES = [MODE_AUTO, MODE_HEAT, MODE_OFF] WATER_HEATER_MAP_TADO = { CONST_OVERLAY_MANUAL: MODE_HEAT, CONST_OVERLAY_TIMER: MODE_HEAT, CONST_OVERLAY_TADO_MODE: MODE_HEAT, CONST_HVAC_HEAT: MODE_HEAT, CONST_MODE_SMART_SCHEDULE: MODE_AUTO, CONST_MODE_OFF: MODE_OFF, } SUPPORT_FLAGS_HEATER = SUPPORT_OPERATION_MODE SERVICE_WATER_HEATER_TIMER = "set_water_heater_timer" ATTR_TIME_PERIOD = "time_period" WATER_HEATER_TIMER_SCHEMA = { vol.Required(ATTR_TIME_PERIOD, default="01:00:00"): vol.All( cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds() ), vol.Optional(ATTR_TEMPERATURE): vol.Coerce(float), } async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities ): """Set up the Tado water heater platform.""" tado = hass.data[DOMAIN][entry.entry_id][DATA] entities = await hass.async_add_executor_job(_generate_entities, tado) platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_WATER_HEATER_TIMER, WATER_HEATER_TIMER_SCHEMA, "set_timer", ) if entities: async_add_entities(entities, True) def 
_generate_entities(tado): """Create all water heater entities.""" entities = [] for zone in tado.zones: if zone["type"] == TYPE_HOT_WATER: entity = create_water_heater_entity(tado, zone["name"], zone["id"], zone) entities.append(entity) return entities def create_water_heater_entity(tado, name: str, zone_id: int, zone: str): """Create a Tado water heater device.""" capabilities = tado.get_capabilities(zone_id) supports_temperature_control = capabilities["canSetTemperature"] if supports_temperature_control and "temperatures" in capabilities: temperatures = capabilities["temperatures"] min_temp = float(temperatures["celsius"]["min"]) max_temp = float(temperatures["celsius"]["max"]) else: min_temp = None max_temp = None entity = TadoWaterHeater( tado, name, zone_id, supports_temperature_control, min_temp, max_temp, zone["devices"][0], ) return entity class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): """Representation of a Tado water heater.""" def __init__( self, tado, zone_name, zone_id, supports_temperature_control, min_temp, max_temp, device_info, ): """Initialize of Tado water heater entity.""" self._tado = tado super().__init__(zone_name, device_info, tado.device_id, zone_id) self.zone_id = zone_id self._unique_id = f"{zone_id} {tado.device_id}" self._device_is_active = False self._supports_temperature_control = supports_temperature_control self._min_temperature = min_temp self._max_temperature = max_temp self._target_temp = None self._supported_features = SUPPORT_FLAGS_HEATER if self._supports_temperature_control: self._supported_features |= SUPPORT_TARGET_TEMPERATURE self._current_tado_hvac_mode = CONST_MODE_SMART_SCHEDULE self._overlay_mode = CONST_MODE_SMART_SCHEDULE self._tado_zone_data = None async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.device_id, "zone", self.zone_id ), self._async_update_callback, ) ) 
self._async_update_data() @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the entity.""" return self.zone_name @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def current_operation(self): """Return current readable operation mode.""" return WATER_HEATER_MAP_TADO.get(self._current_tado_hvac_mode) @property def target_temperature(self): """Return the temperature we try to reach.""" return self._tado_zone_data.target_temp @property def is_away_mode_on(self): """Return true if away mode is on.""" return self._tado_zone_data.is_away @property def operation_list(self): """Return the list of available operation modes (readable).""" return OPERATION_MODES @property def temperature_unit(self): """Return the unit of measurement used by the platform.""" return TEMP_CELSIUS @property def min_temp(self): """Return the minimum temperature.""" return self._min_temperature @property def max_temp(self): """Return the maximum temperature.""" return self._max_temperature def set_operation_mode(self, operation_mode): """Set new operation mode.""" mode = None if operation_mode == MODE_OFF: mode = CONST_MODE_OFF elif operation_mode == MODE_AUTO: mode = CONST_MODE_SMART_SCHEDULE elif operation_mode == MODE_HEAT: mode = CONST_MODE_HEAT self._control_heater(hvac_mode=mode) def set_timer(self, time_period, temperature=None): """Set the timer on the entity, and temperature if supported.""" if not self._supports_temperature_control and temperature is not None: temperature = None self._control_heater( hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period ) def set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if not self._supports_temperature_control or temperature is None: return if self._current_tado_hvac_mode not in ( CONST_MODE_OFF, CONST_MODE_AUTO, 
CONST_MODE_SMART_SCHEDULE, ): self._control_heater(target_temp=temperature) return self._control_heater(target_temp=temperature, hvac_mode=CONST_MODE_HEAT) @callback def _async_update_callback(self): """Load tado data and update state.""" self._async_update_data() self.async_write_ha_state() @callback def _async_update_data(self): """Load tado data.""" _LOGGER.debug("Updating water_heater platform for zone %d", self.zone_id) self._tado_zone_data = self._tado.data["zone"][self.zone_id] self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode def _control_heater(self, hvac_mode=None, target_temp=None, duration=None): """Send new target temperature.""" if hvac_mode: self._current_tado_hvac_mode = hvac_mode if target_temp: self._target_temp = target_temp # Set a target temperature if we don't have any if self._target_temp is None: self._target_temp = self.min_temp if self._current_tado_hvac_mode == CONST_MODE_SMART_SCHEDULE: _LOGGER.debug( "Switching to SMART_SCHEDULE for zone %s (%d)", self.zone_name, self.zone_id, ) self._tado.reset_zone_overlay(self.zone_id) return if self._current_tado_hvac_mode == CONST_MODE_OFF: _LOGGER.debug( "Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id ) self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, TYPE_HOT_WATER) return overlay_mode = CONST_OVERLAY_MANUAL if duration: overlay_mode = CONST_OVERLAY_TIMER elif self._tado.fallback: # Fallback to Smart Schedule at next Schedule switch if we have fallback enabled overlay_mode = CONST_OVERLAY_TADO_MODE _LOGGER.debug( "Switching to %s for zone %s (%d) with temperature %s", self._current_tado_hvac_mode, self.zone_name, self.zone_id, self._target_temp, ) self._tado.set_zone_overlay( zone_id=self.zone_id, overlay_mode=overlay_mode, temperature=self._target_temp, duration=duration, device_type=TYPE_HOT_WATER, ) self._overlay_mode = self._current_tado_hvac_mode
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/tado/water_heater.py
"""Demo platform for the cover component.""" from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.core import callback from homeassistant.helpers.event import async_track_utc_time_change from . import DOMAIN async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Demo covers.""" async_add_entities( [ DemoCover(hass, "cover_1", "Kitchen Window"), DemoCover(hass, "cover_2", "Hall Window", 10), DemoCover(hass, "cover_3", "Living Room Window", 70, 50), DemoCover( hass, "cover_4", "Garage Door", device_class="garage", supported_features=(SUPPORT_OPEN | SUPPORT_CLOSE), ), ] ) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Demo config entry.""" await async_setup_platform(hass, {}, async_add_entities) class DemoCover(CoverEntity): """Representation of a demo cover.""" def __init__( self, hass, unique_id, name, position=None, tilt_position=None, device_class=None, supported_features=None, ): """Initialize the cover.""" self.hass = hass self._unique_id = unique_id self._name = name self._position = position self._device_class = device_class self._supported_features = supported_features self._set_position = None self._set_tilt_position = None self._tilt_position = tilt_position self._requested_closing = True self._requested_closing_tilt = True self._unsub_listener_cover = None self._unsub_listener_cover_tilt = None self._is_opening = False self._is_closing = False if position is None: self._closed = True else: self._closed = self.current_cover_position <= 0 @property def device_info(self): """Return device info.""" return { "identifiers": { # Serial numbers are unique identifiers within a specific domain (DOMAIN, self.unique_id) }, "name": self.name, } @property def unique_id(self): """Return unique ID for cover.""" return self._unique_id @property def name(self): """Return the name of the cover.""" return 
self._name @property def should_poll(self): """No polling needed for a demo cover.""" return False @property def current_cover_position(self): """Return the current position of the cover.""" return self._position @property def current_cover_tilt_position(self): """Return the current tilt position of the cover.""" return self._tilt_position @property def is_closed(self): """Return if the cover is closed.""" return self._closed @property def is_closing(self): """Return if the cover is closing.""" return self._is_closing @property def is_opening(self): """Return if the cover is opening.""" return self._is_opening @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return self._device_class @property def supported_features(self): """Flag supported features.""" if self._supported_features is not None: return self._supported_features return super().supported_features async def async_close_cover(self, **kwargs): """Close the cover.""" if self._position == 0: return if self._position is None: self._closed = True self.async_write_ha_state() return self._is_closing = True self._listen_cover() self._requested_closing = True self.async_write_ha_state() async def async_close_cover_tilt(self, **kwargs): """Close the cover tilt.""" if self._tilt_position in (0, None): return self._listen_cover_tilt() self._requested_closing_tilt = True async def async_open_cover(self, **kwargs): """Open the cover.""" if self._position == 100: return if self._position is None: self._closed = False self.async_write_ha_state() return self._is_opening = True self._listen_cover() self._requested_closing = False self.async_write_ha_state() async def async_open_cover_tilt(self, **kwargs): """Open the cover tilt.""" if self._tilt_position in (100, None): return self._listen_cover_tilt() self._requested_closing_tilt = False async def async_set_cover_position(self, **kwargs): """Move the cover to a specific position.""" position = 
kwargs.get(ATTR_POSITION) self._set_position = round(position, -1) if self._position == position: return self._listen_cover() self._requested_closing = position < self._position async def async_set_cover_tilt_position(self, **kwargs): """Move the cover til to a specific position.""" tilt_position = kwargs.get(ATTR_TILT_POSITION) self._set_tilt_position = round(tilt_position, -1) if self._tilt_position == tilt_position: return self._listen_cover_tilt() self._requested_closing_tilt = tilt_position < self._tilt_position async def async_stop_cover(self, **kwargs): """Stop the cover.""" self._is_closing = False self._is_opening = False if self._position is None: return if self._unsub_listener_cover is not None: self._unsub_listener_cover() self._unsub_listener_cover = None self._set_position = None async def async_stop_cover_tilt(self, **kwargs): """Stop the cover tilt.""" if self._tilt_position is None: return if self._unsub_listener_cover_tilt is not None: self._unsub_listener_cover_tilt() self._unsub_listener_cover_tilt = None self._set_tilt_position = None @callback def _listen_cover(self): """Listen for changes in cover.""" if self._unsub_listener_cover is None: self._unsub_listener_cover = async_track_utc_time_change( self.hass, self._time_changed_cover ) async def _time_changed_cover(self, now): """Track time changes.""" if self._requested_closing: self._position -= 10 else: self._position += 10 if self._position in (100, 0, self._set_position): await self.async_stop_cover() self._closed = self.current_cover_position <= 0 self.async_write_ha_state() @callback def _listen_cover_tilt(self): """Listen for changes in cover tilt.""" if self._unsub_listener_cover_tilt is None: self._unsub_listener_cover_tilt = async_track_utc_time_change( self.hass, self._time_changed_cover_tilt ) async def _time_changed_cover_tilt(self, now): """Track time changes.""" if self._requested_closing_tilt: self._tilt_position -= 10 else: self._tilt_position += 10 if self._tilt_position in 
(100, 0, self._set_tilt_position): await self.async_stop_cover_tilt() self.async_write_ha_state()
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/demo/cover.py
"""Support for Konnected devices.""" import asyncio import logging import konnected from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_STATE, CONF_ACCESS_TOKEN, CONF_BINARY_SENSORS, CONF_DEVICES, CONF_HOST, CONF_ID, CONF_NAME, CONF_PIN, CONF_PORT, CONF_SENSORS, CONF_SWITCHES, CONF_TYPE, CONF_ZONE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.network import get_url from .const import ( CONF_ACTIVATION, CONF_API_HOST, CONF_BLINK, CONF_DEFAULT_OPTIONS, CONF_DHT_SENSORS, CONF_DISCOVERY, CONF_DS18B20_SENSORS, CONF_INVERSE, CONF_MOMENTARY, CONF_PAUSE, CONF_POLL_INTERVAL, CONF_REPEAT, DOMAIN, ENDPOINT_ROOT, STATE_LOW, ZONE_TO_PIN, ) from .errors import CannotConnect _LOGGER = logging.getLogger(__name__) KONN_MODEL = "Konnected" KONN_MODEL_PRO = "Konnected Pro" # Indicate how each unit is controlled (pin or zone) KONN_API_VERSIONS = { KONN_MODEL: CONF_PIN, KONN_MODEL_PRO: CONF_ZONE, } class AlarmPanel: """A representation of a Konnected alarm panel.""" def __init__(self, hass, config_entry): """Initialize the Konnected device.""" self.hass = hass self.config_entry = config_entry self.config = config_entry.data self.options = config_entry.options or config_entry.data.get( CONF_DEFAULT_OPTIONS, {} ) self.host = self.config.get(CONF_HOST) self.port = self.config.get(CONF_PORT) self.client = None self.status = None self.api_version = KONN_API_VERSIONS[KONN_MODEL] self.connected = False self.connect_attempts = 0 self.cancel_connect_retry = None @property def device_id(self): """Device id is the chipId (pro) or MAC address as string with punctuation removed.""" return self.config.get(CONF_ID) @property def stored_configuration(self): """Return the configuration stored in `hass.data` for this device.""" return self.hass.data[DOMAIN][CONF_DEVICES].get(self.device_id) @property def available(self): """Return whether 
the device is available.""" return self.connected def format_zone(self, zone, other_items=None): """Get zone or pin based dict based on the client type.""" payload = { self.api_version: zone if self.api_version == CONF_ZONE else ZONE_TO_PIN[zone] } payload.update(other_items or {}) return payload async def async_connect(self, now=None): """Connect to and setup a Konnected device.""" if self.connected: return if self.cancel_connect_retry: # cancel any pending connect attempt and try now self.cancel_connect_retry() try: self.client = konnected.Client( host=self.host, port=str(self.port), websession=aiohttp_client.async_get_clientsession(self.hass), ) self.status = await self.client.get_status() self.api_version = KONN_API_VERSIONS.get( self.status.get("model", KONN_MODEL), KONN_API_VERSIONS[KONN_MODEL] ) _LOGGER.info( "Connected to new %s device", self.status.get("model", "Konnected") ) _LOGGER.debug(self.status) await self.async_update_initial_states() # brief delay to allow processing of recent status req await asyncio.sleep(0.1) await self.async_sync_device_config() except self.client.ClientError as err: _LOGGER.warning("Exception trying to connect to panel: %s", err) # retry in a bit, never more than ~3 min self.connect_attempts += 1 self.cancel_connect_retry = self.hass.helpers.event.async_call_later( 2 ** min(self.connect_attempts, 5) * 5, self.async_connect ) return self.connect_attempts = 0 self.connected = True _LOGGER.info( "Set up Konnected device %s. 
Open http://%s:%s in a " "web browser to view device status", self.device_id, self.host, self.port, ) device_registry = await dr.async_get_registry(self.hass) device_registry.async_get_or_create( config_entry_id=self.config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, self.status.get("mac"))}, identifiers={(DOMAIN, self.device_id)}, manufacturer="Konnected.io", name=self.config_entry.title, model=self.config_entry.title, sw_version=self.status.get("swVersion"), ) async def update_switch(self, zone, state, momentary=None, times=None, pause=None): """Update the state of a switchable output.""" try: if self.client: if self.api_version == CONF_ZONE: return await self.client.put_zone( zone, state, momentary, times, pause, ) # device endpoint uses pin number instead of zone return await self.client.put_device( ZONE_TO_PIN[zone], state, momentary, times, pause, ) except self.client.ClientError as err: _LOGGER.warning("Exception trying to update panel: %s", err) raise CannotConnect async def async_save_data(self): """Save the device configuration to `hass.data`.""" binary_sensors = {} for entity in self.options.get(CONF_BINARY_SENSORS) or []: zone = entity[CONF_ZONE] binary_sensors[zone] = { CONF_TYPE: entity[CONF_TYPE], CONF_NAME: entity.get( CONF_NAME, f"Konnected {self.device_id[6:]} Zone {zone}" ), CONF_INVERSE: entity.get(CONF_INVERSE), ATTR_STATE: None, } _LOGGER.debug( "Set up binary_sensor %s (initial state: %s)", binary_sensors[zone].get("name"), binary_sensors[zone].get(ATTR_STATE), ) actuators = [] for entity in self.options.get(CONF_SWITCHES) or []: zone = entity[CONF_ZONE] act = { CONF_ZONE: zone, CONF_NAME: entity.get( CONF_NAME, f"Konnected {self.device_id[6:]} Actuator {zone}", ), ATTR_STATE: None, CONF_ACTIVATION: entity[CONF_ACTIVATION], CONF_MOMENTARY: entity.get(CONF_MOMENTARY), CONF_PAUSE: entity.get(CONF_PAUSE), CONF_REPEAT: entity.get(CONF_REPEAT), } actuators.append(act) _LOGGER.debug("Set up switch %s", act) sensors = [] for entity in 
self.options.get(CONF_SENSORS) or []: zone = entity[CONF_ZONE] sensor = { CONF_ZONE: zone, CONF_NAME: entity.get( CONF_NAME, f"Konnected {self.device_id[6:]} Sensor {zone}" ), CONF_TYPE: entity[CONF_TYPE], CONF_POLL_INTERVAL: entity.get(CONF_POLL_INTERVAL), } sensors.append(sensor) _LOGGER.debug( "Set up %s sensor %s (initial state: %s)", sensor.get(CONF_TYPE), sensor.get(CONF_NAME), sensor.get(ATTR_STATE), ) device_data = { CONF_BINARY_SENSORS: binary_sensors, CONF_SENSORS: sensors, CONF_SWITCHES: actuators, CONF_BLINK: self.options.get(CONF_BLINK), CONF_DISCOVERY: self.options.get(CONF_DISCOVERY), CONF_HOST: self.host, CONF_PORT: self.port, "panel": self, } if CONF_DEVICES not in self.hass.data[DOMAIN]: self.hass.data[DOMAIN][CONF_DEVICES] = {} _LOGGER.debug( "Storing data in hass.data[%s][%s][%s]: %s", DOMAIN, CONF_DEVICES, self.device_id, device_data, ) self.hass.data[DOMAIN][CONF_DEVICES][self.device_id] = device_data @callback def async_binary_sensor_configuration(self): """Return the configuration map for syncing binary sensors.""" return [ self.format_zone(p) for p in self.stored_configuration[CONF_BINARY_SENSORS] ] @callback def async_actuator_configuration(self): """Return the configuration map for syncing actuators.""" return [ self.format_zone( data[CONF_ZONE], {"trigger": (0 if data.get(CONF_ACTIVATION) in [0, STATE_LOW] else 1)}, ) for data in self.stored_configuration[CONF_SWITCHES] ] @callback def async_dht_sensor_configuration(self): """Return the configuration map for syncing DHT sensors.""" return [ self.format_zone( sensor[CONF_ZONE], {CONF_POLL_INTERVAL: sensor[CONF_POLL_INTERVAL]} ) for sensor in self.stored_configuration[CONF_SENSORS] if sensor[CONF_TYPE] == "dht" ] @callback def async_ds18b20_sensor_configuration(self): """Return the configuration map for syncing DS18B20 sensors.""" return [ self.format_zone(sensor[CONF_ZONE]) for sensor in self.stored_configuration[CONF_SENSORS] if sensor[CONF_TYPE] == "ds18b20" ] async def 
async_update_initial_states(self): """Update the initial state of each sensor from status poll.""" for sensor_data in self.status.get("sensors"): sensor_config = self.stored_configuration[CONF_BINARY_SENSORS].get( sensor_data.get(CONF_ZONE, sensor_data.get(CONF_PIN)), {} ) entity_id = sensor_config.get(ATTR_ENTITY_ID) state = bool(sensor_data.get(ATTR_STATE)) if sensor_config.get(CONF_INVERSE): state = not state async_dispatcher_send(self.hass, f"konnected.{entity_id}.update", state) @callback def async_desired_settings_payload(self): """Return a dict representing the desired device configuration.""" # keeping self.hass.data check for backwards compatibility # newly configured integrations store this in the config entry desired_api_host = self.options.get(CONF_API_HOST) or ( self.hass.data[DOMAIN].get(CONF_API_HOST) or get_url(self.hass) ) desired_api_endpoint = desired_api_host + ENDPOINT_ROOT return { "sensors": self.async_binary_sensor_configuration(), "actuators": self.async_actuator_configuration(), "dht_sensors": self.async_dht_sensor_configuration(), "ds18b20_sensors": self.async_ds18b20_sensor_configuration(), "auth_token": self.config.get(CONF_ACCESS_TOKEN), "endpoint": desired_api_endpoint, "blink": self.options.get(CONF_BLINK, True), "discovery": self.options.get(CONF_DISCOVERY, True), } @callback def async_current_settings_payload(self): """Return a dict of configuration currently stored on the device.""" settings = self.status["settings"] if not settings: settings = {} return { "sensors": [ {self.api_version: s[self.api_version]} for s in self.status.get("sensors") ], "actuators": self.status.get("actuators"), "dht_sensors": self.status.get(CONF_DHT_SENSORS), "ds18b20_sensors": self.status.get(CONF_DS18B20_SENSORS), "auth_token": settings.get("token"), "endpoint": settings.get("endpoint"), "blink": settings.get(CONF_BLINK), "discovery": settings.get(CONF_DISCOVERY), } async def async_sync_device_config(self): """Sync the new zone configuration to the 
Konnected device if needed.""" _LOGGER.debug( "Device %s settings payload: %s", self.device_id, self.async_desired_settings_payload(), ) if ( self.async_desired_settings_payload() != self.async_current_settings_payload() ): _LOGGER.info("pushing settings to device %s", self.device_id) await self.client.put_settings(**self.async_desired_settings_payload()) async def get_status(hass, host, port): """Get the status of a Konnected Panel.""" client = konnected.Client( host, str(port), aiohttp_client.async_get_clientsession(hass) ) try: return await client.get_status() except client.ClientError as err: _LOGGER.error("Exception trying to get panel status: %s", err) raise CannotConnect from err
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/konnected/panel.py
"""Config flow for Universal Devices ISY994 integration.""" import logging from urllib.parse import urlparse from pyisy.configuration import Configuration from pyisy.connection import Connection import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.components import ssdp from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from .const import ( CONF_IGNORE_STRING, CONF_RESTORE_LIGHT_STATE, CONF_SENSOR_STRING, CONF_TLS_VER, CONF_VAR_SENSOR_STRING, DEFAULT_IGNORE_STRING, DEFAULT_RESTORE_LIGHT_STATE, DEFAULT_SENSOR_STRING, DEFAULT_TLS_VERSION, DEFAULT_VAR_SENSOR_STRING, ISY_URL_POSTFIX, UDN_UUID_PREFIX, ) from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) def _data_schema(schema_input): """Generate schema with defaults.""" return vol.Schema( { vol.Required(CONF_HOST, default=schema_input.get(CONF_HOST, "")): str, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_TLS_VER, default=DEFAULT_TLS_VERSION): vol.In([1.1, 1.2]), }, extra=vol.ALLOW_EXTRA, ) async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ user = data[CONF_USERNAME] password = data[CONF_PASSWORD] host = urlparse(data[CONF_HOST]) tls_version = data.get(CONF_TLS_VER) if host.scheme == "http": https = False port = host.port or 80 elif host.scheme == "https": https = True port = host.port or 443 else: _LOGGER.error("isy994 host value in configuration is invalid") raise InvalidHost # Connect to ISY controller. isy_conf = await hass.async_add_executor_job( _fetch_isy_configuration, host.hostname, port, user, password, https, tls_version, host.path, ) if not isy_conf or "name" not in isy_conf or not isy_conf["name"]: raise CannotConnect # Return info that you want to store in the config entry. 
return {"title": f"{isy_conf['name']} ({host.hostname})", "uuid": isy_conf["uuid"]} def _fetch_isy_configuration( address, port, username, password, use_https, tls_ver, webroot ): """Validate and fetch the configuration from the ISY.""" try: isy_conn = Connection( address, port, username, password, use_https, tls_ver, webroot=webroot, ) except ValueError as err: raise InvalidAuth(err.args[0]) from err return Configuration(xml=isy_conn.get_config()) class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Universal Devices ISY994.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH def __init__(self): """Initialize the isy994 config flow.""" self.discovered_conf = {} @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} info = None if user_input is not None: try: info = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidHost: errors["base"] = "invalid_host" except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if not errors: await self.async_set_unique_id(info["uuid"], raise_on_progress=False) self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) return self.async_show_form( step_id="user", data_schema=_data_schema(self.discovered_conf), errors=errors, ) async def async_step_import(self, user_input): """Handle import.""" return await self.async_step_user(user_input) async def async_step_ssdp(self, discovery_info): """Handle a discovered isy994.""" friendly_name = discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME] url = discovery_info[ssdp.ATTR_SSDP_LOCATION] mac = discovery_info[ssdp.ATTR_UPNP_UDN] if 
mac.startswith(UDN_UUID_PREFIX): mac = mac[len(UDN_UUID_PREFIX) :] if url.endswith(ISY_URL_POSTFIX): url = url[: -len(ISY_URL_POSTFIX)] await self.async_set_unique_id(mac) self._abort_if_unique_id_configured() self.discovered_conf = { CONF_NAME: friendly_name, CONF_HOST: url, } # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context["title_placeholders"] = self.discovered_conf return await self.async_step_user() class OptionsFlowHandler(config_entries.OptionsFlow): """Handle a option flow for isy994.""" def __init__(self, config_entry: config_entries.ConfigEntry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = self.config_entry.options restore_light_state = options.get( CONF_RESTORE_LIGHT_STATE, DEFAULT_RESTORE_LIGHT_STATE ) ignore_string = options.get(CONF_IGNORE_STRING, DEFAULT_IGNORE_STRING) sensor_string = options.get(CONF_SENSOR_STRING, DEFAULT_SENSOR_STRING) var_sensor_string = options.get( CONF_VAR_SENSOR_STRING, DEFAULT_VAR_SENSOR_STRING ) options_schema = vol.Schema( { vol.Optional(CONF_IGNORE_STRING, default=ignore_string): str, vol.Optional(CONF_SENSOR_STRING, default=sensor_string): str, vol.Optional(CONF_VAR_SENSOR_STRING, default=var_sensor_string): str, vol.Required( CONF_RESTORE_LIGHT_STATE, default=restore_light_state ): bool, } ) return self.async_show_form(step_id="init", data_schema=options_schema) class InvalidHost(exceptions.HomeAssistantError): """Error to indicate the host value is invalid.""" class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect.""" class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/isy994/config_flow.py
"""Support for departure information for Rhein-Main public transport.""" import asyncio from datetime import timedelta import logging from RMVtransport import RMVtransport from RMVtransport.rmvtransport import RMVtransportApiConnectionError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) CONF_NEXT_DEPARTURE = "next_departure" CONF_STATION = "station" CONF_DESTINATIONS = "destinations" CONF_DIRECTION = "direction" CONF_LINES = "lines" CONF_PRODUCTS = "products" CONF_TIME_OFFSET = "time_offset" CONF_MAX_JOURNEYS = "max_journeys" CONF_TIMEOUT = "timeout" DEFAULT_NAME = "RMV Journey" VALID_PRODUCTS = ["U-Bahn", "Tram", "Bus", "S", "RB", "RE", "EC", "IC", "ICE"] ICONS = { "U-Bahn": "mdi:subway", "Tram": "mdi:tram", "Bus": "mdi:bus", "S": "mdi:train", "RB": "mdi:train", "RE": "mdi:train", "EC": "mdi:train", "IC": "mdi:train", "ICE": "mdi:train", "SEV": "mdi:checkbox-blank-circle-outline", None: "mdi:clock", } ATTRIBUTION = "Data provided by opendata.rmv.de" SCAN_INTERVAL = timedelta(seconds=60) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_NEXT_DEPARTURE): [ { vol.Required(CONF_STATION): cv.string, vol.Optional(CONF_DESTINATIONS, default=[]): vol.All( cv.ensure_list, [cv.string] ), vol.Optional(CONF_DIRECTION): cv.string, vol.Optional(CONF_LINES, default=[]): vol.All( cv.ensure_list, [cv.positive_int, cv.string] ), vol.Optional(CONF_PRODUCTS, default=VALID_PRODUCTS): vol.All( cv.ensure_list, [vol.In(VALID_PRODUCTS)] ), vol.Optional(CONF_TIME_OFFSET, default=0): cv.positive_int, vol.Optional(CONF_MAX_JOURNEYS, default=5): cv.positive_int, 
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ], vol.Optional(CONF_TIMEOUT, default=10): cv.positive_int, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the RMV departure sensor.""" timeout = config.get(CONF_TIMEOUT) session = async_get_clientsession(hass) sensors = [] for next_departure in config.get(CONF_NEXT_DEPARTURE): sensors.append( RMVDepartureSensor( session, next_departure[CONF_STATION], next_departure.get(CONF_DESTINATIONS), next_departure.get(CONF_DIRECTION), next_departure.get(CONF_LINES), next_departure.get(CONF_PRODUCTS), next_departure.get(CONF_TIME_OFFSET), next_departure.get(CONF_MAX_JOURNEYS), next_departure.get(CONF_NAME), timeout, ) ) tasks = [sensor.async_update() for sensor in sensors] if tasks: await asyncio.wait(tasks) if not any(sensor.data for sensor in sensors): raise PlatformNotReady async_add_entities(sensors) class RMVDepartureSensor(Entity): """Implementation of an RMV departure sensor.""" def __init__( self, session, station, destinations, direction, lines, products, time_offset, max_journeys, name, timeout, ): """Initialize the sensor.""" self._station = station self._name = name self._state = None self.data = RMVDepartureData( session, station, destinations, direction, lines, products, time_offset, max_journeys, timeout, ) self._icon = ICONS[None] @property def name(self): """Return the name of the sensor.""" return self._name @property def available(self): """Return True if entity is available.""" return self._state is not None @property def state(self): """Return the next departure time.""" return self._state @property def state_attributes(self): """Return the state attributes.""" try: return { "next_departures": self.data.departures[1:], "direction": self.data.departures[0].get("direction"), "line": self.data.departures[0].get("line"), "minutes": self.data.departures[0].get("minutes"), "departure_time": self.data.departures[0].get("departure_time"), "product": 
self.data.departures[0].get("product"), ATTR_ATTRIBUTION: ATTRIBUTION, } except IndexError: return {} @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def unit_of_measurement(self): """Return the unit this state is expressed in.""" return TIME_MINUTES async def async_update(self): """Get the latest data and update the state.""" await self.data.async_update() if self._name == DEFAULT_NAME: self._name = self.data.station self._station = self.data.station if not self.data.departures: self._state = None self._icon = ICONS[None] return self._state = self.data.departures[0].get("minutes") self._icon = ICONS[self.data.departures[0].get("product")] class RMVDepartureData: """Pull data from the opendata.rmv.de web page.""" def __init__( self, session, station_id, destinations, direction, lines, products, time_offset, max_journeys, timeout, ): """Initialize the sensor.""" self.station = None self._station_id = station_id self._destinations = destinations self._direction = direction self._lines = lines self._products = products self._time_offset = time_offset self._max_journeys = max_journeys self.rmv = RMVtransport(session, timeout) self.departures = [] self._error_notification = False @Throttle(SCAN_INTERVAL) async def async_update(self): """Update the connection data.""" try: _data = await self.rmv.get_departures( self._station_id, products=self._products, direction_id=self._direction, max_journeys=50, ) except RMVtransportApiConnectionError: self.departures = [] _LOGGER.warning("Could not retrieve data from rmv.de") return self.station = _data.get("station") _deps = [] _deps_not_found = set(self._destinations) for journey in _data["journeys"]: # find the first departure meeting the criteria _nextdep = {} if self._destinations: dest_found = False for dest in self._destinations: if dest in journey["stops"]: dest_found = True if dest in _deps_not_found: _deps_not_found.remove(dest) _nextdep["destination"] = dest if not 
dest_found: continue if self._lines and journey["number"] not in self._lines: continue if journey["minutes"] < self._time_offset: continue for attr in ["direction", "departure_time", "product", "minutes"]: _nextdep[attr] = journey.get(attr, "") _nextdep["line"] = journey.get("number", "") _deps.append(_nextdep) if len(_deps) > self._max_journeys: break if not self._error_notification and _deps_not_found: self._error_notification = True _LOGGER.info("Destination(s) %s not found", ", ".join(_deps_not_found)) self.departures = _deps
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/rmvtransport/sensor.py
"""Initialization of ATAG One climate platform.""" from typing import List, Optional from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( CURRENT_HVAC_HEAT, CURRENT_HVAC_IDLE, HVAC_MODE_AUTO, HVAC_MODE_HEAT, PRESET_AWAY, PRESET_BOOST, SUPPORT_PRESET_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE from . import CLIMATE, DOMAIN, AtagEntity PRESET_SCHEDULE = "Auto" PRESET_MANUAL = "Manual" PRESET_EXTEND = "Extend" SUPPORT_PRESET = [ PRESET_MANUAL, PRESET_SCHEDULE, PRESET_EXTEND, PRESET_AWAY, PRESET_BOOST, ] SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE HVAC_MODES = [HVAC_MODE_AUTO, HVAC_MODE_HEAT] async def async_setup_entry(hass, entry, async_add_entities): """Load a config entry.""" coordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities([AtagThermostat(coordinator, CLIMATE)]) class AtagThermostat(AtagEntity, ClimateEntity): """Atag climate device.""" @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS @property def hvac_mode(self) -> Optional[str]: """Return hvac operation ie. 
heat, cool mode.""" if self.coordinator.atag.climate.hvac_mode in HVAC_MODES: return self.coordinator.atag.climate.hvac_mode return None @property def hvac_modes(self) -> List[str]: """Return the list of available hvac operation modes.""" return HVAC_MODES @property def hvac_action(self) -> Optional[str]: """Return the current running hvac operation.""" if self.coordinator.atag.climate.status: return CURRENT_HVAC_HEAT return CURRENT_HVAC_IDLE @property def temperature_unit(self): """Return the unit of measurement.""" return self.coordinator.atag.climate.temp_unit @property def current_temperature(self) -> Optional[float]: """Return the current temperature.""" return self.coordinator.atag.climate.temperature @property def target_temperature(self) -> Optional[float]: """Return the temperature we try to reach.""" return self.coordinator.atag.climate.target_temperature @property def preset_mode(self) -> Optional[str]: """Return the current preset mode, e.g., auto, manual, fireplace, extend, etc.""" return self.coordinator.atag.climate.preset_mode @property def preset_modes(self) -> Optional[List[str]]: """Return a list of available preset modes.""" return SUPPORT_PRESET async def async_set_temperature(self, **kwargs) -> None: """Set new target temperature.""" await self.coordinator.atag.climate.set_temp(kwargs.get(ATTR_TEMPERATURE)) self.async_write_ha_state() async def async_set_hvac_mode(self, hvac_mode: str) -> None: """Set new target hvac mode.""" await self.coordinator.atag.climate.set_hvac_mode(hvac_mode) self.async_write_ha_state() async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" await self.coordinator.atag.climate.set_preset_mode(preset_mode) self.async_write_ha_state()
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/atag/climate.py
"""Config flow for BleBox devices integration.""" import logging from blebox_uniapi.error import Error, UnsupportedBoxVersion from blebox_uniapi.products import Products from blebox_uniapi.session import ApiHost import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( ADDRESS_ALREADY_CONFIGURED, CANNOT_CONNECT, DEFAULT_HOST, DEFAULT_PORT, DEFAULT_SETUP_TIMEOUT, DOMAIN, UNKNOWN, UNSUPPORTED_VERSION, ) _LOGGER = logging.getLogger(__name__) def host_port(data): """Return a list with host and port.""" return (data[CONF_HOST], data[CONF_PORT]) def create_schema(previous_input=None): """Create a schema with given values as default.""" if previous_input is not None: host, port = host_port(previous_input) else: host = DEFAULT_HOST port = DEFAULT_PORT return vol.Schema( { vol.Required(CONF_HOST, default=host): str, vol.Required(CONF_PORT, default=port): int, } ) LOG_MSG = { UNSUPPORTED_VERSION: "Outdated firmware", CANNOT_CONNECT: "Failed to identify device", UNKNOWN: "Unknown error while identifying device", } class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for BleBox devices.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the BleBox config flow.""" self.device_config = {} def handle_step_exception( self, step, exception, schema, host, port, message_id, log_fn ): """Handle step exceptions.""" log_fn("%s at %s:%d (%s)", LOG_MSG[message_id], host, port, exception) return self.async_show_form( step_id="user", data_schema=schema, errors={"base": message_id}, description_placeholders={"address": f"{host}:{port}"}, ) async def async_step_user(self, user_input=None): """Handle initial user-triggered config step.""" hass = self.hass schema = create_schema(user_input) if user_input is None: return self.async_show_form( 
step_id="user", data_schema=schema, errors={}, description_placeholders={}, ) addr = host_port(user_input) for entry in hass.config_entries.async_entries(DOMAIN): if addr == host_port(entry.data): host, port = addr return self.async_abort( reason=ADDRESS_ALREADY_CONFIGURED, description_placeholders={"address": f"{host}:{port}"}, ) websession = async_get_clientsession(hass) api_host = ApiHost(*addr, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER) try: product = await Products.async_from_host(api_host) except UnsupportedBoxVersion as ex: return self.handle_step_exception( "user", ex, schema, *addr, UNSUPPORTED_VERSION, _LOGGER.debug ) except Error as ex: return self.handle_step_exception( "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.warning ) except RuntimeError as ex: return self.handle_step_exception( "user", ex, schema, *addr, UNKNOWN, _LOGGER.error ) # Check if configured but IP changed since await self.async_set_unique_id(product.unique_id) self._abort_if_unique_id_configured() return self.async_create_entry(title=product.name, data=user_input)
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/blebox/config_flow.py
"""Support for INSTEON Modems (PLM and Hub).""" import asyncio import logging from pyinsteon import async_close, async_connect, devices from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP from homeassistant.exceptions import ConfigEntryNotReady from .const import ( CONF_CAT, CONF_DIM_STEPS, CONF_HOUSECODE, CONF_OVERRIDE, CONF_SUBCAT, CONF_UNITCODE, CONF_X10, DOMAIN, INSTEON_COMPONENTS, ON_OFF_EVENTS, ) from .schemas import convert_yaml_to_config_flow from .utils import ( add_on_off_event_device, async_register_services, get_device_platforms, register_new_device_callback, ) _LOGGER = logging.getLogger(__name__) OPTIONS = "options" async def async_get_device_config(hass, config_entry): """Initiate the connection and services.""" # Make a copy of addresses due to edge case where the list of devices could change during status update # Cannot be done concurrently due to issues with the underlying protocol. for address in list(devices): try: await devices[address].async_status() except AttributeError: pass await devices.async_load(id_devices=1) for addr in devices: device = devices[addr] flags = True for name in device.operating_flags: if not device.operating_flags[name].is_loaded: flags = False break if flags: for name in device.properties: if not device.properties[name].is_loaded: flags = False break # Cannot be done concurrently due to issues with the underlying protocol. 
if not device.aldb.is_loaded or not flags: await device.async_read_config() await devices.async_save(workdir=hass.config.config_dir) async def close_insteon_connection(*args): """Close the Insteon connection.""" await async_close() async def async_setup(hass, config): """Set up the Insteon platform.""" if DOMAIN not in config: return True conf = config[DOMAIN] data, options = convert_yaml_to_config_flow(conf) if options: hass.data[DOMAIN] = {} hass.data[DOMAIN][OPTIONS] = options # Create a config entry with the connection data hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=data ) ) return True async def async_setup_entry(hass, entry): """Set up an Insteon entry.""" if not devices.modem: try: await async_connect(**entry.data) except ConnectionError as exception: _LOGGER.error("Could not connect to Insteon modem") raise ConfigEntryNotReady from exception hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_insteon_connection) await devices.async_load( workdir=hass.config.config_dir, id_devices=0, load_modem_aldb=0 ) # If options existed in YAML and have not already been saved to the config entry # add them now if ( not entry.options and entry.source == SOURCE_IMPORT and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(OPTIONS) ): hass.config_entries.async_update_entry( entry=entry, options=hass.data[DOMAIN][OPTIONS], ) for device_override in entry.options.get(CONF_OVERRIDE, []): # Override the device default capabilities for a specific address address = device_override.get("address") if not devices.get(address): cat = device_override[CONF_CAT] subcat = device_override[CONF_SUBCAT] devices.set_id(address, cat, subcat, 0) for device in entry.options.get(CONF_X10, []): housecode = device.get(CONF_HOUSECODE) unitcode = device.get(CONF_UNITCODE) x10_type = "on_off" steps = device.get(CONF_DIM_STEPS, 22) if device.get(CONF_PLATFORM) == "light": x10_type = "dimmable" elif device.get(CONF_PLATFORM) == 
"binary_sensor": x10_type = "sensor" _LOGGER.debug( "Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type ) device = devices.add_x10_device(housecode, unitcode, x10_type, steps) for component in INSTEON_COMPONENTS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) for address in devices: device = devices[address] platforms = get_device_platforms(device) if ON_OFF_EVENTS in platforms: add_on_off_event_device(hass, device) _LOGGER.debug("Insteon device count: %s", len(devices)) register_new_device_callback(hass) async_register_services(hass) device_registry = await hass.helpers.device_registry.async_get_registry() device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, str(devices.modem.address))}, manufacturer="Smart Home", name=f"{devices.modem.description} {devices.modem.address}", model=f"{devices.modem.model} ({devices.modem.cat!r}, 0x{devices.modem.subcat:02x})", sw_version=f"{devices.modem.firmware:02x} Engine Version: {devices.modem.engine_version}", ) asyncio.create_task(async_get_device_config(hass, entry)) return True
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/insteon/__init__.py
"""The Rollease Acmeda Automate integration.""" import asyncio from homeassistant import config_entries, core from .const import DOMAIN from .hub import PulseHub CONF_HUBS = "hubs" PLATFORMS = ["cover", "sensor"] async def async_setup(hass: core.HomeAssistant, config: dict): """Set up the Rollease Acmeda Automate component.""" return True async def async_setup_entry( hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry ): """Set up Rollease Acmeda Automate hub from a config entry.""" hub = PulseHub(hass, config_entry) if not await hub.async_setup(): return False hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = hub for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, component) ) return True async def async_unload_entry( hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry ): """Unload a config entry.""" hub = hass.data[DOMAIN][config_entry.entry_id] unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, component) for component in PLATFORMS ] ) ) if not await hub.async_reset(): return False if unload_ok: hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/acmeda/__init__.py
"""Support for the KIWI.KI lock platform.""" import logging from kiwiki import KiwiClient, KiwiException import voluptuous as vol from homeassistant.components.lock import PLATFORM_SCHEMA, LockEntity from homeassistant.const import ( ATTR_ID, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_PASSWORD, CONF_USERNAME, STATE_LOCKED, STATE_UNLOCKED, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_call_later _LOGGER = logging.getLogger(__name__) ATTR_TYPE = "hardware_type" ATTR_PERMISSION = "permission" ATTR_CAN_INVITE = "can_invite_others" UNLOCK_MAINTAIN_TIME = 5 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the KIWI lock platform.""" try: kiwi = KiwiClient(config[CONF_USERNAME], config[CONF_PASSWORD]) except KiwiException as exc: _LOGGER.error(exc) return available_locks = kiwi.get_locks() if not available_locks: # No locks found; abort setup routine. 
_LOGGER.info("No KIWI locks found in your account") return add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True) class KiwiLock(LockEntity): """Representation of a Kiwi lock.""" def __init__(self, kiwi_lock, client): """Initialize the lock.""" self._sensor = kiwi_lock self._client = client self.lock_id = kiwi_lock["sensor_id"] self._state = STATE_LOCKED address = kiwi_lock.get("address") address.update( { ATTR_LATITUDE: address.pop("lat", None), ATTR_LONGITUDE: address.pop("lng", None), } ) self._device_attrs = { ATTR_ID: self.lock_id, ATTR_TYPE: kiwi_lock.get("hardware_type"), ATTR_PERMISSION: kiwi_lock.get("highest_permission"), ATTR_CAN_INVITE: kiwi_lock.get("can_invite"), **address, } @property def name(self): """Return the name of the lock.""" name = self._sensor.get("name") specifier = self._sensor["address"].get("specifier") return name or specifier @property def is_locked(self): """Return true if lock is locked.""" return self._state == STATE_LOCKED @property def device_state_attributes(self): """Return the device specific state attributes.""" return self._device_attrs @callback def clear_unlock_state(self, _): """Clear unlock state automatically.""" self._state = STATE_LOCKED self.async_write_ha_state() def unlock(self, **kwargs): """Unlock the device.""" try: self._client.open_door(self.lock_id) except KiwiException: _LOGGER.error("failed to open door") else: self._state = STATE_UNLOCKED self.hass.add_job( async_call_later, self.hass, UNLOCK_MAINTAIN_TIME, self.clear_unlock_state, )
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/kiwi/lock.py
"""Support for French FAI Bouygues Bbox routers.""" from collections import namedtuple from datetime import timedelta import logging from typing import List import pybbox import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) DEFAULT_HOST = "192.168.1.254" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=60) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string} ) def get_scanner(hass, config): """Validate the configuration and return a Bbox scanner.""" scanner = BboxDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) class BboxDeviceScanner(DeviceScanner): """This class scans for devices connected to the bbox.""" def __init__(self, config): """Get host from config.""" self.host = config[CONF_HOST] """Initialize the scanner.""" self.last_results: List[Device] = [] self.success_init = self._update_info() _LOGGER.info("Scanner initialized") def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() return [device.mac for device in self.last_results] def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" filter_named = [ result.name for result in self.last_results if result.mac == device ] if filter_named: return filter_named[0] return None @Throttle(MIN_TIME_BETWEEN_SCANS) def _update_info(self): """Check the Bbox for devices. Returns boolean if scanning successful. 
""" _LOGGER.info("Scanning...") box = pybbox.Bbox(ip=self.host) result = box.get_all_connected_devices() now = dt_util.now() last_results = [] for device in result: if device["active"] != 1: continue last_results.append( Device( device["macaddress"], device["hostname"], device["ipaddress"], now ) ) self.last_results = last_results _LOGGER.info("Scan successful") return True
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/bbox/device_tracker.py
"""Handle MySensors devices.""" from functools import partial import logging from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_OFF, STATE_ON from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from .const import CHILD_CALLBACK, NODE_CALLBACK, UPDATE_DELAY _LOGGER = logging.getLogger(__name__) ATTR_CHILD_ID = "child_id" ATTR_DESCRIPTION = "description" ATTR_DEVICE = "device" ATTR_NODE_ID = "node_id" ATTR_HEARTBEAT = "heartbeat" MYSENSORS_PLATFORM_DEVICES = "mysensors_devices_{}" def get_mysensors_devices(hass, domain): """Return MySensors devices for a platform.""" if MYSENSORS_PLATFORM_DEVICES.format(domain) not in hass.data: hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] = {} return hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] class MySensorsDevice: """Representation of a MySensors device.""" def __init__(self, gateway, node_id, child_id, name, value_type): """Set up the MySensors device.""" self.gateway = gateway self.node_id = node_id self.child_id = child_id self._name = name self.value_type = value_type child = gateway.sensors[node_id].children[child_id] self.child_type = child.type self._values = {} self._update_scheduled = False self.hass = None @property def name(self): """Return the name of this entity.""" return self._name @property def device_state_attributes(self): """Return device specific state attributes.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] attr = { ATTR_BATTERY_LEVEL: node.battery_level, ATTR_HEARTBEAT: node.heartbeat, ATTR_CHILD_ID: self.child_id, ATTR_DESCRIPTION: child.description, ATTR_DEVICE: self.gateway.device, ATTR_NODE_ID: self.node_id, } set_req = self.gateway.const.SetReq for value_type, value in self._values.items(): attr[set_req(value_type).name] = value return attr async def async_update(self): """Update the controller with the latest value from a sensor.""" node = 
self.gateway.sensors[self.node_id] child = node.children[self.child_id] set_req = self.gateway.const.SetReq for value_type, value in child.values.items(): _LOGGER.debug( "Entity update: %s: value_type %s, value = %s", self._name, value_type, value, ) if value_type in ( set_req.V_ARMED, set_req.V_LIGHT, set_req.V_LOCK_STATUS, set_req.V_TRIPPED, ): self._values[value_type] = STATE_ON if int(value) == 1 else STATE_OFF elif value_type == set_req.V_DIMMER: self._values[value_type] = int(value) else: self._values[value_type] = value async def _async_update_callback(self): """Update the device.""" raise NotImplementedError @callback def async_update_callback(self): """Update the device after delay.""" if self._update_scheduled: return async def update(): """Perform update.""" try: await self._async_update_callback() except Exception: # pylint: disable=broad-except _LOGGER.exception("Error updating %s", self.name) finally: self._update_scheduled = False self._update_scheduled = True delayed_update = partial(self.hass.async_create_task, update()) self.hass.loop.call_later(UPDATE_DELAY, delayed_update) class MySensorsEntity(MySensorsDevice, Entity): """Representation of a MySensors entity.""" @property def should_poll(self): """Return the polling state. The gateway pushes its states.""" return False @property def available(self): """Return true if entity is available.""" return self.value_type in self._values async def _async_update_callback(self): """Update the entity.""" await self.async_update_ha_state(True) async def async_added_to_hass(self): """Register update callback.""" gateway_id = id(self.gateway) dev_id = gateway_id, self.node_id, self.child_id, self.value_type self.async_on_remove( async_dispatcher_connect( self.hass, CHILD_CALLBACK.format(*dev_id), self.async_update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, NODE_CALLBACK.format(gateway_id, self.node_id), self.async_update_callback, ) )
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/mysensors/device.py
"""BleBox sensor entities.""" from homeassistant.helpers.entity import Entity from . import BleBoxEntity, create_blebox_entities from .const import BLEBOX_TO_HASS_DEVICE_CLASSES, BLEBOX_TO_UNIT_MAP async def async_setup_entry(hass, config_entry, async_add_entities): """Set up a BleBox entry.""" create_blebox_entities( hass, config_entry, async_add_entities, BleBoxSensorEntity, "sensors" ) class BleBoxSensorEntity(BleBoxEntity, Entity): """Representation of a BleBox sensor feature.""" @property def state(self): """Return the state.""" return self._feature.current @property def unit_of_measurement(self): """Return the unit.""" return BLEBOX_TO_UNIT_MAP[self._feature.unit] @property def device_class(self): """Return the device class.""" return BLEBOX_TO_HASS_DEVICE_CLASSES[self._feature.device_class]
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/blebox/sensor.py
"""Support for the Daikin HVAC.""" import logging import voluptuous as vol from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity from homeassistant.components.climate.const import ( ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF, PRESET_AWAY, PRESET_BOOST, PRESET_ECO, PRESET_NONE, SUPPORT_FAN_MODE, SUPPORT_PRESET_MODE, SUPPORT_SWING_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_HOST, CONF_NAME, TEMP_CELSIUS import homeassistant.helpers.config_validation as cv from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, ATTR_STATE_OFF, ATTR_STATE_ON, ATTR_TARGET_TEMPERATURE, ) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string} ) HA_STATE_TO_DAIKIN = { HVAC_MODE_FAN_ONLY: "fan", HVAC_MODE_DRY: "dry", HVAC_MODE_COOL: "cool", HVAC_MODE_HEAT: "hot", HVAC_MODE_HEAT_COOL: "auto", HVAC_MODE_OFF: "off", } DAIKIN_TO_HA_STATE = { "fan": HVAC_MODE_FAN_ONLY, "dry": HVAC_MODE_DRY, "cool": HVAC_MODE_COOL, "hot": HVAC_MODE_HEAT, "auto": HVAC_MODE_HEAT_COOL, "off": HVAC_MODE_OFF, } HA_PRESET_TO_DAIKIN = { PRESET_AWAY: "on", PRESET_NONE: "off", PRESET_BOOST: "powerful", PRESET_ECO: "econo", } HA_ATTR_TO_DAIKIN = { ATTR_PRESET_MODE: "en_hol", ATTR_HVAC_MODE: "mode", ATTR_FAN_MODE: "f_rate", ATTR_SWING_MODE: "f_dir", ATTR_INSIDE_TEMPERATURE: "htemp", ATTR_OUTSIDE_TEMPERATURE: "otemp", ATTR_TARGET_TEMPERATURE: "stemp", } DAIKIN_ATTR_ADVANCED = "adv" async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up the Daikin HVAC platform. Can only be called when a user accidentally mentions the platform in their config. But even in that case it would have been ignored. 
""" async def async_setup_entry(hass, entry, async_add_entities): """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) async_add_entities([DaikinClimate(daikin_api)], update_before_add=True) class DaikinClimate(ClimateEntity): """Representation of a Daikin HVAC.""" def __init__(self, api): """Initialize the climate device.""" self._api = api self._list = { ATTR_HVAC_MODE: list(HA_STATE_TO_DAIKIN), ATTR_FAN_MODE: self._api.device.fan_rate, ATTR_SWING_MODE: self._api.device.swing_modes, } self._supported_features = SUPPORT_TARGET_TEMPERATURE if ( self._api.device.support_away_mode or self._api.device.support_advanced_modes ): self._supported_features |= SUPPORT_PRESET_MODE if self._api.device.support_fan_rate: self._supported_features |= SUPPORT_FAN_MODE if self._api.device.support_swing_mode: self._supported_features |= SUPPORT_SWING_MODE async def _set(self, settings): """Set device settings using API.""" values = {} for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_HVAC_MODE]: value = settings.get(attr) if value is None: continue daikin_attr = HA_ATTR_TO_DAIKIN.get(attr) if daikin_attr is not None: if attr == ATTR_HVAC_MODE: values[daikin_attr] = HA_STATE_TO_DAIKIN[value] elif value in self._list[attr]: values[daikin_attr] = value.lower() else: _LOGGER.error("Invalid value %s for %s", attr, value) # temperature elif attr == ATTR_TEMPERATURE: try: values[HA_ATTR_TO_DAIKIN[ATTR_TARGET_TEMPERATURE]] = str(int(value)) except ValueError: _LOGGER.error("Invalid temperature %s", value) if values: await self._api.device.set(values) @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the thermostat, if any.""" return self._api.name @property def unique_id(self): """Return a unique ID.""" return self._api.device.mac @property def temperature_unit(self): """Return the unit of measurement 
which this thermostat uses.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self._api.device.inside_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._api.device.target_temperature @property def target_temperature_step(self): """Return the supported step of target temperature.""" return 1 async def async_set_temperature(self, **kwargs): """Set new target temperature.""" await self._set(kwargs) @property def hvac_mode(self): """Return current operation ie. heat, cool, idle.""" daikin_mode = self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1] return DAIKIN_TO_HA_STATE.get(daikin_mode, HVAC_MODE_HEAT_COOL) @property def hvac_modes(self): """Return the list of available operation modes.""" return self._list.get(ATTR_HVAC_MODE) async def async_set_hvac_mode(self, hvac_mode): """Set HVAC mode.""" await self._set({ATTR_HVAC_MODE: hvac_mode}) @property def fan_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE])[1].title() async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" await self._set({ATTR_FAN_MODE: fan_mode}) @property def fan_modes(self): """List of available fan modes.""" return self._list.get(ATTR_FAN_MODE) @property def swing_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE])[1].title() async def async_set_swing_mode(self, swing_mode): """Set new target temperature.""" await self._set({ATTR_SWING_MODE: swing_mode}) @property def swing_modes(self): """List of available swing modes.""" return self._list.get(ATTR_SWING_MODE) @property def preset_mode(self): """Return the preset_mode.""" if ( self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_PRESET_MODE])[1] == HA_PRESET_TO_DAIKIN[PRESET_AWAY] ): return PRESET_AWAY if ( HA_PRESET_TO_DAIKIN[PRESET_BOOST] in 
self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_BOOST if ( HA_PRESET_TO_DAIKIN[PRESET_ECO] in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_ECO return PRESET_NONE async def async_set_preset_mode(self, preset_mode): """Set preset mode.""" if preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_ON) elif preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_ON ) elif preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_ON ) else: if self.preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_OFF) elif self.preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_OFF ) elif self.preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_OFF ) @property def preset_modes(self): """List of available preset modes.""" ret = [PRESET_NONE] if self._api.device.support_away_mode: ret.append(PRESET_AWAY) if self._api.device.support_advanced_modes: ret += [PRESET_ECO, PRESET_BOOST] return ret async def async_update(self): """Retrieve latest state.""" await self._api.async_update() async def async_turn_on(self): """Turn device on.""" await self._api.device.set({}) async def async_turn_off(self): """Turn device off.""" await self._api.device.set( {HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVAC_MODE_OFF]} ) @property def device_info(self): """Return a device description for device registry.""" return self._api.device_info
"""Define tests for the PlayStation 4 config flow.""" from pyps4_2ndscreen.errors import CredentialTimeout import pytest from homeassistant import data_entry_flow from homeassistant.components import ps4 from homeassistant.components.ps4.const import ( DEFAULT_ALIAS, DEFAULT_NAME, DEFAULT_REGION, DOMAIN, ) from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN, ) from homeassistant.util import location from tests.async_mock import patch from tests.common import MockConfigEntry MOCK_TITLE = "PlayStation 4" MOCK_CODE = 12345678 MOCK_CODE_LEAD_0 = 1234567 MOCK_CODE_LEAD_0_STR = "01234567" MOCK_CREDS = "000aa000" MOCK_HOST = "192.0.0.0" MOCK_HOST_ADDITIONAL = "192.0.0.1" MOCK_DEVICE = { CONF_HOST: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_DEVICE_ADDITIONAL = { CONF_HOST: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, } MOCK_CONFIG = { CONF_IP_ADDRESS: MOCK_HOST, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_CONFIG_ADDITIONAL = { CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL, CONF_NAME: DEFAULT_NAME, CONF_REGION: DEFAULT_REGION, CONF_CODE: MOCK_CODE, } MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]} MOCK_UDP_PORT = int(987) MOCK_TCP_PORT = int(997) MOCK_AUTO = {"Config Mode": "Auto Discover"} MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST} MOCK_LOCATION = location.LocationInfo( "0.0.0.0", "US", "United States", "CA", "California", "San Diego", "92122", "America/Los_Angeles", 32.8594, -117.2073, True, ) @pytest.fixture(name="location_info", autouse=True) def location_info_fixture(): """Mock location info.""" with patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): yield @pytest.fixture(name="ps4_setup", autouse=True) def ps4_setup_fixture(): """Patch ps4 setup entry.""" with patch( 
"homeassistant.components.ps4.async_setup_entry", return_value=True, ): yield async def test_full_flow_implementation(hass): """Test registering an implementation and flow works.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE async def test_multiple_flow_implementation(hass): """Test multiple device flows.""" # User Step Started, results in Step Creds with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # User Input results in created entry. 
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert result["data"]["devices"] == [MOCK_DEVICE] assert result["title"] == MOCK_TITLE # Check if entry exists. entries = hass.config_entries.async_entries() assert len(entries) == 1 # Check if there is a device config in entry. entry_1 = entries[0] assert len(entry_1.data["devices"]) == 1 # Test additional flow. # User Step Started, results in Step Mode: with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" # Step Creds results with form in Step Mode. with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input which is not manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" # Step Link with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE # Check if there are 2 entries. entries = hass.config_entries.async_entries() assert len(entries) == 2 # Check if there is device config in the last entry. entry_2 = entries[-1] assert len(entry_2.data["devices"]) == 1 # Check that entry 1 is different from entry 2. 
assert entry_1 is not entry_2 async def test_port_bind_abort(hass): """Test that flow aborted when cannot bind to ports 987, 997.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT): reason = "port_987_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT): reason = "port_997_bind_error" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == reason async def test_duplicate_abort(hass): """Test that Flow aborts when found devices already configured.""" MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_additional_device(hass): """Test that Flow can configure another device.""" # Mock existing entry. 
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA) entry.add_to_hass(hass) with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}], ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_TOKEN] == MOCK_CREDS assert len(result["data"]["devices"]) == 1 assert result["title"] == MOCK_TITLE async def test_0_pin(hass): """Test Pin with leading '0' is passed correctly.""" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "creds"}, data={}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ), patch( "homeassistant.components.ps4.config_flow.location.async_detect_location_info", return_value=MOCK_LOCATION, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" mock_config = MOCK_CONFIG mock_config[CONF_CODE] = MOCK_CODE_LEAD_0 
with patch( "pyps4_2ndscreen.Helper.link", return_value=(True, True) ) as mock_call, patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], mock_config ) mock_call.assert_called_once_with( MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS ) async def test_no_devices_found_abort(hass): """Test that failure to find devices aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "no_devices_found" async def test_manual_mode(hass): """Test host specified in manual mode is passed to Step Link.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" # Step Mode with User Input: manual, results in Step Link. 
with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_MANUAL ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" async def test_credential_abort(hass): """Test that failure to get credentials aborts flow.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "credential_error" async def test_credential_timeout(hass): """Test that Credential Timeout shows error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" assert result["errors"] == {"base": "credential_timeout"} async def test_wrong_pin_error(hass): """Test that incorrect pin throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "login_failed"} async def test_device_connection_error(hass): """Test that device not connected or on throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" with patch( "pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}] ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_AUTO ) with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_CONFIG ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "link" assert result["errors"] == {"base": "cannot_connect"} async def test_manual_mode_no_ip_error(hass): """Test no IP specified in manual mode throws an error.""" with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": "user"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "creds" with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"Config Mode": "Manual Entry"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "mode" assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
sdague/home-assistant
tests/components/ps4/test_config_flow.py
homeassistant/components/daikin/climate.py
from devito.ir.equations.equation import * # noqa from devito.ir.equations.algorithms import * # noqa
import pytest import numpy as np from devito import (Grid, Function, TimeFunction, SparseTimeFunction, Dimension, # noqa Eq, Operator, ALLOC_GUARD, ALLOC_FLAT, configuration, switchconfig) from devito.data import LEFT, RIGHT, Decomposition, loc_data_idx, convert_index from devito.tools import as_tuple from devito.types import Scalar from devito.data.allocators import ExternalAllocator class TestDataBasic(object): def test_simple_indexing(self): """Test data packing/unpacking via basic indexing.""" grid = Grid(shape=(16, 16, 16)) u = Function(name='yu3D', grid=grid, space_order=0) # Test simple insertion and extraction u.data[0, 1, 1] = 1. assert u.data[0, 0, 0] == 0. assert u.data[0, 1, 1] == 1. assert np.all(u.data == u.data[:, :, :]) assert 1. in u.data[0] assert 1. in u.data[0, 1] # Test negative indices assert u.data[0, -15, -15] == 1. u.data[6, 0, 0] = 1. assert u.data[-10, :, :].sum() == 1. # Test setting whole array to given value u.data[:] = 3. assert np.all(u.data == 3.) # Test insertion of single value into block u.data[5, :, 5] = 5. assert np.all(u.data[5, :, 5] == 5.) # Test extraction of block with negative indices sliced = u.data[-11, :, -11] assert sliced.shape == (16,) assert np.all(sliced == 5.) # Test insertion of block into block block = np.ndarray(shape=(1, 16, 1), dtype=np.float32) block.fill(4.) u.data[4:5, :, 4:5] = block assert np.all(u.data[4, :, 4] == block) def test_advanced_indexing(self): """Test data packing/unpacking via advanced indexing.""" grid = Grid(shape=(4, 4, 4)) u = TimeFunction(name='yu4D', grid=grid, space_order=0, time_order=1) u.data[:] = 0. # Test slicing w/ negative indices, combined to explicit indexing u.data[1, 1:-1, 1:-1, 1:-1] = 6. assert np.all(u.data[0] == 0.) assert np.all(u.data[1, 1:-1, 1:-1, 1:-1] == 6.) assert np.all(u.data[1, :, 0] == 0.) assert np.all(u.data[1, :, -1] == 0.) assert np.all(u.data[1, :, :, 0] == 0.) assert np.all(u.data[1, :, :, -1] == 0.) 
def test_negative_step(self): """Test slicing with a negative step.""" grid = Grid(shape=(6, 6, 6)) u = TimeFunction(name='u', grid=grid, dtype=np.int32) u.data[:] = 0. dat = np.array([1, 2, 3, 4, 5, 6]) u.data[0, :, 0, 0] = dat assert (np.array(u.data[0, 3::-1, 0, 0]) == dat[3::-1]).all() assert (np.array(u.data[0, 5:1:-1, 0, 0]) == dat[5:1:-1]).all() def test_negative_start(self): """Test slicing with a negative start.""" grid = Grid(shape=(13,)) f = Function(name='f', grid=grid) idx = slice(-4, None, 1) dat = np.array([1, 2, 3, 4]) f.data[idx] = dat assert np.all(np.array(f.data[9:]) == dat) def test_halo_indexing(self): """Test data packing/unpacking in presence of a halo region.""" domain_shape = (16, 16, 16) grid = Grid(shape=domain_shape) u = Function(name='yu3D', grid=grid, space_order=2) assert u.shape == u.data.shape == domain_shape assert u._shape_with_inhalo == u.data_with_halo.shape == (20, 20, 20) assert u.shape_with_halo == u._shape_with_inhalo # W/o MPI, these two coincide # Test simple insertion and extraction u.data_with_halo[0, 0, 0] = 1. u.data[0, 0, 0] = 2. assert u.data_with_halo[0, 0, 0] == 1. assert u.data[0, 0, 0] == 2. assert u.data_with_halo[2, 2, 2] == 2. # Test negative indices u.data_with_halo[-1, -1, -1] = 3. assert u.data[-1, -1, -1] == 0. assert u.data_with_halo[-1, -1, -1] == 3. def test_broadcasting(self): """ Test Data broadcasting, expected to behave as NumPy broadcasting. Notes ----- Refer to https://docs.scipy.org/doc/numpy-1.15.0/user/basics.broadcasting.html for more info about NumPy broadcasting rules. """ grid = Grid(shape=(4, 4, 4)) u = Function(name='yu3D', grid=grid) u.data[:] = 2. # Assign from array with lower-dimensional shape v = np.ones(shape=(4, 4), dtype=u.dtype) u.data[:] = v assert np.all(u.data == 1.) 
# Assign from array with higher-dimensional shape causes a ValueError exception v = np.zeros(shape=(4, 4, 4, 4), dtype=u.dtype) try: u.data[:] = v except ValueError: assert True except: assert False # Assign from array having shape with some 1-valued entries v = np.zeros(shape=(4, 1, 4), dtype=u.dtype) u.data[:] = v assert np.all(u.data == 0.) def test_arithmetic(self): """Test arithmetic operations involving Data objects.""" grid = Grid(shape=(16, 16, 16)) u = Function(name='yu3D', grid=grid, space_order=0) u.data[:] = 1 # Simple arithmetic assert np.all(u.data == 1) assert np.all(u.data + 2. == 3.) assert np.all(u.data - 2. == -1.) assert np.all(u.data * 2. == 2.) assert np.all(u.data / 2. == 0.5) assert np.all(u.data % 2 == 1.) # Increments and partial increments u.data[:] += 2. assert np.all(u.data == 3.) u.data[9, :, :] += 1. assert all(np.all(u.data[i, :, :] == 3.) for i in range(9)) assert np.all(u.data[9, :, :] == 4.) # Right operations __rOP__ u.data[:] = 1. arr = np.ndarray(shape=(16, 16, 16), dtype=np.float32) arr.fill(2.) assert np.all(arr - u.data == 1.) def test_illegal_indexing(self): """Tests that indexing into illegal entries throws an exception.""" nt = 5 grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid) v = TimeFunction(name='v', grid=grid, save=nt) try: u.data[5] assert False except IndexError: pass try: v.data[nt] assert False except IndexError: pass def test_logic_indexing(self): """Test logic indexing along stepping dimensions.""" grid = Grid(shape=(4, 4, 4)) v_mod = TimeFunction(name='v_mod', grid=grid) v_mod.data[0] = 1. v_mod.data[1] = 2. assert np.all(v_mod.data[0] == 1.) assert np.all(v_mod.data[1] == 2.) assert np.all(v_mod.data[2] == v_mod.data[0]) assert np.all(v_mod.data[4] == v_mod.data[0]) assert np.all(v_mod.data[3] == v_mod.data[1]) assert np.all(v_mod.data[-1] == v_mod.data[1]) assert np.all(v_mod.data[-2] == v_mod.data[0]) def test_indexing_into_sparse(self): """ Test indexing into SparseFunctions. 
""" grid = Grid(shape=(4, 4)) sf = SparseTimeFunction(name='sf', grid=grid, npoint=1, nt=10) sf.data[1:-1, 0] = np.arange(8) assert np.all(sf.data[1:-1, 0] == np.arange(8)) class TestLocDataIDX(object): """ Test the support function loc_data_idx. """ @pytest.mark.parametrize('idx, expected', [ ('(slice(10, None, -1), slice(11, None, -3))', '(slice(0, 11, 1), slice(2, 12, 3))'), ('(2, 5)', '(slice(2, 3, 1), slice(5, 6, 1))') ]) def test_loc_data_idx(self, idx, expected): """ Test loc_data_idx located in devito/data/utils.py """ idx = eval(idx) expected = eval(expected) result = loc_data_idx(idx) assert result == expected class TestMetaData(object): """ Test correctness of metadata describing size and offset of the various data regions, such as DOMAIN, HALO, etc. """ def test_wo_halo_wo_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=0, padding=0) assert u.shape == u._shape_with_inhalo == u.shape_allocated assert u.shape_with_halo == u._shape_with_inhalo # W/o MPI, these two coincide assert u._size_halo == u._size_owned == u._size_padding ==\ ((0, 0), (0, 0), (0, 0)) assert u._offset_domain == (0, 0, 0) assert u._offset_halo == u._offset_owned == ((0, 4), (0, 4), (0, 4)) def test_w_halo_wo_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=2, padding=0) assert len(u.shape) == len(u._size_halo.left) assert u._size_halo == u._size_owned == ((2, 2), (2, 2), (2, 2)) assert u._offset_domain == (2, 2, 2) assert u._offset_halo == ((0, 6), (0, 6), (0, 6)) assert u._offset_owned == ((2, 4), (2, 4), (2, 4)) assert tuple(i + j*2 for i, j in zip(u.shape, u._size_halo.left)) ==\ u.shape_with_halo # Try with different grid shape and space_order grid2 = Grid(shape=(3, 3, 3)) u2 = Function(name='u2', grid=grid2, space_order=4, padding=0) assert u2.shape == (3, 3, 3) assert u2._offset_domain == (4, 4, 4) assert u2._offset_halo == ((0, 7), (0, 7), (0, 7)) assert tuple(i + j*2 for i, j in zip(u2.shape, 
u2._size_halo.left)) ==\ u2.shape_with_halo assert u2.shape_with_halo == (11, 11, 11) def test_wo_halo_w_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=2, padding=((1, 1), (3, 3), (4, 4))) assert tuple(i + j + k for i, (j, k) in zip(u.shape_with_halo, u._padding)) ==\ u.shape_allocated assert u._halo == ((2, 2), (2, 2), (2, 2)) assert u._size_padding == ((1, 1), (3, 3), (4, 4)) assert u._size_padding.left == u._size_padding.right == (1, 3, 4) assert u._size_nodomain == ((3, 3), (5, 5), (6, 6)) assert u._size_nodomain.left == u._size_nodomain.right == (3, 5, 6) assert u._size_nopad == (8, 8, 8) assert u._offset_domain == (3, 5, 6) assert u._offset_halo == ((1, 7), (3, 9), (4, 10)) assert u._offset_halo.left == (1, 3, 4) assert u._offset_halo.right == (7, 9, 10) assert u._offset_owned == ((3, 5), (5, 7), (6, 8)) def test_w_halo_w_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=(2, 1, 4), padding=((1, 1), (2, 2), (3, 3))) assert u._size_halo == ((1, 4), (1, 4), (1, 4)) assert u._size_owned == ((4, 1), (4, 1), (4, 1)) assert u._size_nodomain == ((2, 5), (3, 6), (4, 7)) assert u._size_nodomain.left == (2, 3, 4) assert u._size_nodomain.right == (5, 6, 7) assert u._size_nopad == (9, 9, 9) assert u._offset_domain == (2, 3, 4) assert u._offset_halo == ((1, 6), (2, 7), (3, 8)) assert u._offset_owned == ((2, 5), (3, 6), (4, 7)) @switchconfig(autopadding=True, platform='bdw') # Platform is to fix pad value def test_w_halo_w_autopadding(self): grid = Grid(shape=(4, 4, 4)) u0 = Function(name='u0', grid=grid, space_order=0) u1 = Function(name='u1', grid=grid, space_order=3) assert configuration['platform'].simd_items_per_reg(u1.dtype) == 8 assert u0._size_halo == ((0, 0), (0, 0), (0, 0)) assert u0._size_padding == ((0, 0), (0, 0), (0, 12)) assert u0._size_nodomain == u0._size_padding assert u0.shape_allocated == (4, 4, 16) assert u1._size_halo == ((3, 3), (3, 3), (3, 3)) assert u1._size_padding 
== ((0, 0), (0, 0), (0, 14)) # 14 stems from 6 + 8 assert u1._size_nodomain == ((3, 3), (3, 3), (3, 17)) assert u1.shape_allocated == (10, 10, 24) class TestDecomposition(object): """ Notes ----- If these tests don't work, there is no chance that the tests in TestDataDistributed will pass. """ def test_glb_to_loc_index_conversions(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # A global index as single argument assert d.index_glb_to_loc(5) == 0 assert d.index_glb_to_loc(6) == 1 assert d.index_glb_to_loc(7) == 2 assert d.index_glb_to_loc(3) is None # Retrieve relative local min/man given global min/max assert d.index_glb_to_loc((5, 7)) == (0, 2) assert d.index_glb_to_loc((5, 9)) == (0, 2) assert d.index_glb_to_loc((1, 3)) == (-1, -3) assert d.index_glb_to_loc((1, 6)) == (0, 1) assert d.index_glb_to_loc((None, None)) == (0, 2) # Retrieve absolute local min/man given global min/max assert d.index_glb_to_loc((5, 7), rel=False) == (5, 7) assert d.index_glb_to_loc((5, 9), rel=False) == (5, 7) assert d.index_glb_to_loc((1, 3), rel=False) == (-1, -3) assert d.index_glb_to_loc((1, 6), rel=False) == (5, 6) assert d.index_glb_to_loc((None, None), rel=False) == (5, 7) def test_glb_to_loc_w_side(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # A global index as single argument assert d.index_glb_to_loc(5, LEFT) == 0 assert d.index_glb_to_loc(6, RIGHT) == 2 assert d.index_glb_to_loc(7, LEFT) == 2 assert d.index_glb_to_loc(4, RIGHT) == 0 assert d.index_glb_to_loc(6, LEFT) == 1 assert d.index_glb_to_loc(5, RIGHT) == 1 assert d.index_glb_to_loc(2, LEFT) is None assert d.index_glb_to_loc(3, RIGHT) is None def test_loc_to_glb_index_conversions(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # Convert local indices to global indices assert d.index_loc_to_glb((0, 2)) == (5, 7) d2 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 0) assert d2.index_loc_to_glb((0, 2)) == (0, 2) d3 = 
Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 3) assert d3.index_loc_to_glb((1, 3)) == (9, 11) def test_convert_index(self): d0 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) d1 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 3) decomposition = (d0, d1) idx0 = (5, slice(8, 11, 1)) result0 = [] for i, j in zip(idx0, decomposition): result0.append(convert_index(i, j)) expected0 = (0, slice(0, 3, 1)) assert as_tuple(result0) == expected0 def test_reshape_identity(self): d = Decomposition([[0, 1], [2, 3]], 2) # Identity decomposition assert len(d.reshape(0, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, 0), [[0, 1], [2, 3]])) def test_reshape_right_only(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at right only assert len(d.reshape(0, 2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, 2), [[0, 1], [2, 3, 4, 5]])) # Reduction at right affecting one sub-domain only, but not the whole subdomain assert len(d.reshape(0, -1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -1), [[0, 1], [2]])) # Reduction at right over one whole sub-domain assert len(d.reshape(0, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -2), [[0, 1], []])) # Reduction at right over multiple sub-domains assert len(d.reshape(0, -3)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -3), [[0], []])) def test_reshape_left_only(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at left only assert len(d.reshape(2, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(2, 0), [[0, 1, 2, 3], [4, 5]])) # Reduction at left affecting one sub-domain only, but not the whole subdomain assert len(d.reshape(-1, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, 0), [[0], [1, 2]])) # Reduction at left over one whole sub-domain assert len(d.reshape(-2, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-2, 0), [[], [0, 1]])) # Reduction at right over multiple 
sub-domains assert len(d.reshape(-3, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-3, 0), [[], [0]])) def test_reshape_left_right(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at both left and right assert len(d.reshape(1, 1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(1, 1), [[0, 1, 2], [3, 4, 5]])) # Reduction at both left and right assert len(d.reshape(-1, -1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -1), [[0], [1]])) # Reduction at both left and right, with the right one obliterating one subdomain assert len(d.reshape(-1, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -2), [[0], []])) # Reduction at both left and right obliterating all subdomains # triggering an exception assert len(d.reshape(-1, -3)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -3), [[], []])) assert len(d.reshape(-2, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -3), [[], []])) def test_reshape_slice(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) assert d.reshape(slice(None)) == d assert d.reshape(slice(2, 9)) == Decomposition([[0], [1, 2], [3, 4, 5], [6]], 2) assert d.reshape(slice(3, 5)) == Decomposition([[], [0, 1], [], []], 2) assert d.reshape(slice(3, 3)) == Decomposition([[], [], [], []], 2) assert d.reshape(slice(13, 13)) == Decomposition([[], [], [], []], 2) assert d.reshape(slice(2, None)) == Decomposition([[0], [1, 2], [3, 4, 5], [6, 7, 8, 9]], 2) assert d.reshape(slice(4)) == Decomposition([[0, 1, 2], [3], [], []], 2) assert d.reshape(slice(-2, 2)) == Decomposition([[0, 1, 2, 3], [], [], []], 2) assert d.reshape(slice(-2)) == Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9]], 2) assert d.reshape(slice(3, -1)) == Decomposition([[], [0, 1], [2, 3, 4], [5, 6, 7]], 2) def test_reshape_iterable(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) assert d.reshape(()) == Decomposition([[], [], [], []], 2) assert d.reshape((1, 3, 
5)) == Decomposition([[0], [1], [2], []], 2) assert d.reshape((1, 3, 10, 11)) == Decomposition([[0], [1], [], [2, 3]], 2) assert d.reshape((1, 3, 10, 11, 14)) == Decomposition([[0], [1], [], [2, 3]], 2) class TestDataDistributed(object): """ Test Data indexing and manipulation when distributed over a set of MPI processes. """ @pytest.mark.parallel(mode=4) def test_localviews(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid) u.data[:] = grid.distributor.myrank assert u.data_ro_domain._local[0, 0] == grid.distributor.myrank assert u.data_ro_domain._local[1, 1] == grid.distributor.myrank assert u.data_ro_domain._local[-1, -1] == grid.distributor.myrank assert u.data_ro_with_halo._local[1, 1] == grid.distributor.myrank if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[1:, 1:] == myrank) assert np.all(u.data_ro_with_halo._local[0] == 0.) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[1:3, :2] == myrank) assert np.all(u.data_ro_with_halo._local[0] == 0.) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[:2, 1:3] == myrank) assert np.all(u.data_ro_with_halo._local[2] == 0.) else: assert np.all(u.data_ro_with_halo._local[:2, :2] == myrank) assert np.all(u.data_ro_with_halo._local[2] == 0.) @pytest.mark.parallel(mode=4) def test_trivial_insertion(self): grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0) v = Function(name='v', grid=grid, space_order=1) u.data[:] = 1. assert np.all(u.data == 1.) assert np.all(u.data._local == 1.) v.data_with_halo[:] = 1. assert v.data_with_halo[:].shape == (3, 3) assert np.all(v.data_with_halo == 1.) assert np.all(v.data_with_halo[:] == 1.) assert np.all(v.data_with_halo._local == 1.) 
@pytest.mark.parallel(mode=4) def test_indexing(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert u.data[0, 0] == myrank assert u.data[2, 2] is None assert u.data[2].size == 0 assert u.data[:, 2].size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert u.data[0, 0] is None assert u.data[2, 2] is None assert u.data[2].size == 0 assert np.all(u.data[:, 2] == [myrank, myrank]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert u.data[0, 0] is None assert u.data[2, 2] is None assert np.all(u.data[2] == [myrank, myrank]) assert u.data[:, 2].size == 0 else: assert u.data[0, 0] is None assert u.data[2, 2] == myrank assert np.all(u.data[2] == [myrank, myrank]) assert np.all(u.data[:, 2] == [myrank, myrank]) @pytest.mark.parallel(mode=4) def test_slicing(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank # `u.data` is a view of the global data array restricted, on each rank, # to the local rank domain, so it must be == myrank assert np.all(u.data == myrank) assert np.all(u.data._local == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data[:2, :2] == myrank) assert u.data[:2, 2:].size == u.data[2:, :2].size == u.data[2:, 2:].size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data[:2, 2:] == myrank) assert u.data[:2, :2].size == u.data[2:, :2].size == u.data[2:, 2:].size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data[2:, :2] == myrank) assert u.data[:2, 2:].size == u.data[:2, :2].size == u.data[2:, 2:].size == 0 else: assert np.all(u.data[2:, 2:] == myrank) assert u.data[:2, 2:].size == 
u.data[2:, :2].size == u.data[:2, :2].size == 0 @pytest.mark.parallel(mode=4) def test_slicing_ns(self): # Test slicing with a negative step grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank dat = np.arange(16, dtype=np.int32) dat = dat.reshape(grid.shape) u.data[::-1, ::-1] = dat if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[15, 14], [11, 10]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[13, 12], [9, 8]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[7, 6], [3, 2]]) else: assert np.all(u.data == [[5, 4], [1, 0]]) @pytest.mark.parallel(mode=4) def test_getitem(self): # __getitem__ mpi slicing tests: grid = Grid(shape=(8, 8)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map f = Function(name='f', grid=grid, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid.shape) f.data[:] = a result = np.array(f.data[::-1, ::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result[0] == [[63, 62, 61, 60]]) assert np.all(result[1] == [[55, 54, 53, 52]]) assert np.all(result[2] == [[47, 46, 45, 44]]) assert np.all(result[3] == [[39, 38, 37, 36]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(result[0] == [[59, 58, 57, 56]]) assert np.all(result[1] == [[51, 50, 49, 48]]) assert np.all(result[2] == [[43, 42, 41, 40]]) assert np.all(result[3] == [[35, 34, 33, 32]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result[0] == [[31, 30, 29, 28]]) assert np.all(result[1] == [[23, 22, 21, 20]]) assert np.all(result[2] == [[15, 14, 13, 12]]) assert np.all(result[3] == [[7, 6, 5, 4]]) else: assert np.all(result[0] == [[27, 26, 25, 24]]) assert np.all(result[1] == [[19, 18, 17, 16]]) assert np.all(result[2] == 
[[11, 10, 9, 8]]) assert np.all(result[3] == [[3, 2, 1, 0]]) result1 = np.array(f.data[5, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result1.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result1.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result1 == [[46, 45]]) else: assert np.all(result1 == [[44, 43, 42]]) result2 = np.array(f.data[6:4:-1, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result2.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result2.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result2[0] == [[54, 53]]) assert np.all(result2[1] == [[46, 45]]) else: assert np.all(result2[0] == [[52, 51, 50]]) assert np.all(result2[1] == [[44, 43, 42]]) result3 = np.array(f.data[6:4:-1, 2:7]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result3.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result3.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result3[0] == [[50, 51]]) assert np.all(result3[1] == [[42, 43]]) else: assert np.all(result3[0] == [[52, 53, 54]]) assert np.all(result3[1] == [[44, 45, 46]]) result4 = np.array(f.data[4:2:-1, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result4 == [[38, 37]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(result4 == [[36, 35, 34]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result4 == [[30, 29]]) else: assert np.all(result4 == [[28, 27, 26]]) @pytest.mark.parallel(mode=4) def test_big_steps(self): # Test slicing with a step size > 1 grid = Grid(shape=(8, 8)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map f = Function(name='f', grid=grid, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid.shape) f.data[:] = a r0 = 
np.array(f.data[::3, ::3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r0 == [[0, 3], [24, 27]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r0 == [[6], [30]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r0 == [[48, 51]]) else: assert np.all(r0 == [[54]]) r1 = np.array(f.data[1::3, 1::3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r1 == [[9]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r1 == [[12, 15]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r1 == [[33], [57]]) else: assert np.all(r1 == [[36, 39], [60, 63]]) r2 = np.array(f.data[::-3, ::-3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r2 == [[63]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r2 == [[60, 57]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r2 == [[39], [15]]) else: assert np.all(r2 == [[36, 33], [12, 9]]) r3 = np.array(f.data[6::-3, 6::-3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r3 == [[54, 51], [30, 27]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r3 == [[48], [24]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r3 == [[6, 3]]) else: assert np.all(r3 == [[0]]) @pytest.mark.parallel(mode=4) def test_setitem(self): # __setitem__ mpi slicing tests grid = Grid(shape=(12, 12)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map g = Function(name='g', grid=grid, space_order=0, dtype=np.int32) h = Function(name='h', grid=grid, space_order=0, dtype=np.int32) grid1 = Grid(shape=(8, 8)) f = Function(name='f', grid=grid1, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid1.shape) f.data[:] = a g.data[0, 0:3] = f.data[7, 4:7] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(g.data) == [[60, 61, 62, 0, 0, 0], 
[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(np.array(g.data)) == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(g.data)) == 0 else: assert np.all(np.array(g.data)) == 0 h.data[2:10, 2:10] = f.data[::-1, ::-1] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 63, 62, 61, 60], [0, 0, 55, 54, 53, 52], [0, 0, 47, 46, 45, 44], [0, 0, 39, 38, 37, 36]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [59, 58, 57, 56, 0, 0], [51, 50, 49, 48, 0, 0], [43, 42, 41, 40, 0, 0], [35, 34, 33, 32, 0, 0]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 31, 30, 29, 28], [0, 0, 23, 22, 21, 20], [0, 0, 15, 14, 13, 12], [0, 0, 7, 6, 5, 4], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.all(np.array(h.data) == [[27, 26, 25, 24, 0, 0], [19, 18, 17, 16, 0, 0], [11, 10, 9, 8, 0, 0], [3, 2, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) @pytest.mark.parallel(mode=4) def test_hd_slicing(self): # Test higher dimension slices grid = Grid(shape=(4, 4, 4)) x, y, z = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map t = Function(name='t', grid=grid, space_order=0) dat = np.arange(64, dtype=np.int32) b = dat.reshape(grid.shape) t.data[:] = b c = np.array(t.data[::-1, ::-1, ::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[63, 59], [47, 43]]) assert np.all(c[:, :, 3] == [[60, 56], [44, 40]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[55, 51], [39, 35]]) assert np.all(c[:, :, 3] == [[52, 48], [36, 32]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[31, 27], [15, 11]]) 
assert np.all(c[:, :, 3] == [[28, 24], [12, 8]]) else: assert np.all(c[:, :, 0] == [[23, 19], [7, 3]]) assert np.all(c[:, :, 3] == [[20, 16], [4, 0]]) d = np.array(t.data[::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[49, 53], [33, 37]]) assert np.all(d[:, :, 2] == [[50, 54], [34, 38]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[57, 61], [41, 45]]) assert np.all(d[:, :, 2] == [[58, 62], [42, 46]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[17, 21], [1, 5]]) assert np.all(d[:, :, 2] == [[18, 22], [2, 6]]) else: assert np.all(d[:, :, 1] == [[25, 29], [9, 13]]) assert np.all(d[:, :, 2] == [[26, 30], [10, 14]]) e = np.array(t.data[:, 3:2:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert e.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(e[:, :, 0] == [[12], [28]]) assert np.all(e[:, :, 1] == [[13], [29]]) assert np.all(e[:, :, 2] == [[14], [30]]) assert np.all(e[:, :, 3] == [[15], [31]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert e.size == 0 else: assert np.all(e[:, :, 0] == [[44], [60]]) assert np.all(e[:, :, 1] == [[45], [61]]) assert np.all(e[:, :, 2] == [[46], [62]]) assert np.all(e[:, :, 3] == [[47], [63]]) @pytest.mark.parallel(mode=4) def test_niche_slicing(self): grid0 = Grid(shape=(8, 8)) x0, y0 = grid0.dimensions glb_pos_map0 = grid0.distributor.glb_pos_map f = Function(name='f', grid=grid0, space_order=0, dtype=np.int32) dat = np.arange(64, dtype=np.int32) a = dat.reshape(grid0.shape) f.data[:] = a grid1 = Grid(shape=(12, 12)) x1, y1 = grid1.dimensions glb_pos_map1 = grid1.distributor.glb_pos_map h = Function(name='h', grid=grid1, space_order=0, dtype=np.int32) grid2 = Grid(shape=(4, 4, 4)) t = Function(name='t', grid=grid2, space_order=0) b = dat.reshape(grid2.shape) t.data[:] = b tdat0 = np.array(f.data[-2::, -2::]) if LEFT in glb_pos_map0[x0] and LEFT in 
glb_pos_map0[y0]: assert tdat0.size == 0 elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert tdat0.size == 0 elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert tdat0.size == 0 else: assert np.all(tdat0 == [[54, 55], [62, 63]]) h.data[9:1:-1, 9:1:-1] = f.data[:, :] if LEFT in glb_pos_map1[x1] and LEFT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 63, 62, 61, 60], [0, 0, 55, 54, 53, 52], [0, 0, 47, 46, 45, 44], [0, 0, 39, 38, 37, 36]]) elif LEFT in glb_pos_map1[x1] and RIGHT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [59, 58, 57, 56, 0, 0], [51, 50, 49, 48, 0, 0], [43, 42, 41, 40, 0, 0], [35, 34, 33, 32, 0, 0]]) elif RIGHT in glb_pos_map1[x1] and LEFT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 31, 30, 29, 28], [0, 0, 23, 22, 21, 20], [0, 0, 15, 14, 13, 12], [0, 0, 7, 6, 5, 4], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.all(np.array(h.data) == [[27, 26, 25, 24, 0, 0], [19, 18, 17, 16, 0, 0], [11, 10, 9, 8, 0, 0], [3, 2, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) f.data[:] = 0 f.data[::2, ::2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 4, 0], [0, 0, 0, 0], [16, 0, 20, 0], [0, 0, 0, 0]]) elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[8, 0, 12, 0], [0, 0, 0, 0], [24, 0, 28, 0], [0, 0, 0, 0]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[32, 0, 36, 0], [0, 0, 0, 0], [48, 0, 52, 0], [0, 0, 0, 0]]) else: assert np.all(np.array(f.data) == [[40, 0, 44, 0], [0, 0, 0, 0], [56, 0, 60, 0], [0, 0, 0, 0]]) f.data[:] = 0 f.data[1::2, 1::2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 0, 0, 4], [0, 0, 0, 0], [0, 16, 0, 20]]) elif LEFT in 
glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 8, 0, 12], [0, 0, 0, 0], [0, 24, 0, 28]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 32, 0, 36], [0, 0, 0, 0], [0, 48, 0, 52]]) else: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 40, 0, 44], [0, 0, 0, 0], [0, 56, 0, 60]]) f.data[:] = 0 f.data[6::-2, 6::-2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[60, 0, 56, 0], [0, 0, 0, 0], [44, 0, 40, 0], [0, 0, 0, 0]]) elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[52, 0, 48, 0], [0, 0, 0, 0], [36, 0, 32, 0], [0, 0, 0, 0]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[28, 0, 24, 0], [0, 0, 0, 0], [12, 0, 8, 0], [0, 0, 0, 0]]) else: assert np.all(np.array(f.data) == [[20, 0, 16, 0], [0, 0, 0, 0], [4, 0, 0, 0], [0, 0, 0, 0]]) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('shape, slice0, slice1, slice2', [ ((31, 31, 31), (slice(None, None, 1), 2, slice(None, None, 1)), (slice(None, None, 1), 0, slice(None, None, 1)), (slice(None, None, 1), 1, slice(None, None, 1))), ((17, 17, 17), (slice(None, None, 1), slice(None, None, 1), 2), (slice(None, None, 1), slice(None, None, 1), 0), (slice(None, None, 1), slice(None, None, 1), 1)), ((8, 8, 8), (slice(None, None, 1), 5, slice(None, None, 1)), (slice(None, None, 1), 1, slice(None, None, 1)), (slice(None, None, 1), 7, slice(None, None, 1)))]) def test_niche_slicing2(self, shape, slice0, slice1, slice2): grid = Grid(shape=shape) f = Function(name='f', grid=grid) f.data[:] = 1 f.data[slice0] = f.data[slice1] f.data[slice0] += f.data[slice2] result0 = np.array(f.data[slice0]) expected0 = np.full(result0.shape, 2) assert(np.all(result0 == expected0)) result1 = np.array(f.data[slice1]) expected1 = np.full(result1.shape, 1) 
assert(np.all(result1 == expected1)) result2 = np.array(f.data[slice2]) expected2 = np.full(result2.shape, 1) assert(np.all(result2 == expected2)) @pytest.mark.parallel(mode=4) def test_neg_start_stop(self): grid0 = Grid(shape=(8, 8)) f = Function(name='f', grid=grid0, space_order=0, dtype=np.int32) dat = np.arange(64, dtype=np.int32) a = dat.reshape(grid0.shape) f.data[:] = a grid1 = Grid(shape=(12, 12)) x, y = grid1.dimensions glb_pos_map = grid1.distributor.glb_pos_map h = Function(name='h', grid=grid1, space_order=0, dtype=np.int32) slices = (slice(-3, -1, 1), slice(-1, -5, -1)) h.data[8:10, 0:4] = f.data[slices] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.count_nonzero(h.data[:]) == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.count_nonzero(h.data[:]) == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [47, 46, 45, 44, 0, 0], [55, 54, 53, 52, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.count_nonzero(h.data[:]) == 0 @pytest.mark.parallel(mode=4) def test_indexing_in_views(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank # Note that the `1`s are global indices view = u.data[1:, 1:] assert np.all(view[:] == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view.shape == (1, 1) assert np.all(view == 0.) assert view[0, 0] == 0. assert view[1, 1] is None assert view[1].shape == (0, 1) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert view.shape == (1, 2) assert np.all(view == 1.) assert view[0, 0] is None assert view[1, 1] is None assert view[1].shape == (0, 2) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view.shape == (2, 1) assert np.all(view == 2.) 
assert view[0, 0] is None assert view[1, 1] is None assert view[1].shape == (1,) assert np.all(view[1] == 2.) else: assert view.shape == (2, 2) assert np.all(view == 3.) assert view[0, 0] is None assert view[1, 1] == 3. assert view[1].shape == (2,) assert np.all(view[1] == 3.) # Now we further slice into `view` view2 = view[1:, 1:] assert np.all(view2[:] == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view2.shape == (0, 0) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert view2.shape == (0, 2) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view2.shape == (2, 0) else: assert view2.shape == (2, 2) # Now a change in `view2` by the only rank that "sees" it should affect # both `view` and `u.data` view2[:] += 1 if RIGHT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data[:] == myrank + 1) assert np.all(view[:] == myrank + 1) assert np.all(view2[:] == myrank + 1) else: assert np.all(view[:] == myrank) assert np.all(view2[:] == myrank) assert view2.size == 0 @pytest.mark.parallel(mode=4) def test_from_replicated_to_distributed(self): shape = (4, 4) grid = Grid(shape=shape) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map u = Function(name='u', grid=grid, space_order=0) # distributed v = Function(name='v', grid=grid, space_order=0) # distributed a = np.arange(16).reshape(shape) # replicated # Full array u.data[:] = a if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[0, 1], [4, 5]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[2, 3], [6, 7]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[8, 9], [12, 13]]) else: assert np.all(u.data == [[10, 11], [14, 15]]) # Subsection (all ranks touched) u.data[:] = 0 u.data[1:3, 1:3] = a[1:3, 1:3] # Same as above but with negative indices v.data[:] = 0 v.data[1:-1, 1:-1] = a[1:-1, 1:-1] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert 
np.all(u.data == [[0, 0], [0, 5]]) assert np.all(v.data == [[0, 0], [0, 5]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[0, 0], [6, 0]]) assert np.all(v.data == [[0, 0], [6, 0]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[0, 9], [0, 0]]) assert np.all(v.data == [[0, 9], [0, 0]]) else: assert np.all(u.data == [[10, 0], [0, 0]]) assert np.all(v.data == [[10, 0], [0, 0]]) # The assigned data must have same shape as the one of the distributed array, # otherwise an exception is expected try: u.data[1:3, 1:3] = a[1:2, 1:2] except ValueError: assert True except: assert False @pytest.mark.parallel(mode=4) def test_misc_setup(self): """Test setup of Functions with mixed distributed/replicated Dimensions.""" grid = Grid(shape=(4, 4)) _, y = grid.dimensions dy = Dimension(name='dy') # Note: `grid` must be passed to `c` since `x` is a distributed dimension, # and `grid` carries the `x` decomposition c = Function(name='c', grid=grid, dimensions=(y, dy), shape=(4, 5)) # The following should be identical to `c` in everything but the name c2 = Function(name='c2', grid=grid, dimensions=(y, dy), shape=(None, 5)) assert c.shape == c2.shape == (2, 5) assert c.shape_with_halo == c2.shape_with_halo assert c._decomposition == c2._decomposition # The following should all raise an exception as illegal try: Function(name='c3', grid=grid, dimensions=(y, dy)) assert False except TypeError: # Missing `shape` assert True # The following should all raise an exception as illegal try: Function(name='c4', grid=grid, dimensions=(y, dy), shape=(3, 5)) assert False except ValueError: # The provided y-size, 3, doesn't match the y-size in grid (4) assert True # The following should all raise an exception as illegal try: Function(name='c4', grid=grid, dimensions=(y, dy), shape=(4,)) assert False except ValueError: # Too few entries for `shape` (two expected, for `y` and `dy`) assert True @pytest.mark.parallel(mode=4) def 
test_misc_data(self): """ Test data insertion/indexing for Functions with mixed distributed/replicated Dimensions. """ dx = Dimension(name='dx') grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map # Note: `grid` must be passed to `c` since `x` is a distributed dimension, # and `grid` carries the `x` decomposition c = Function(name='c', grid=grid, dimensions=(x, dx), shape=(4, 5)) # Data insertion for i in range(4): c.data[i, 0] = 1.0+i c.data[i, 1] = 1.0+i c.data[i, 2] = 3.0+i c.data[i, 3] = 6.0+i c.data[i, 4] = 5.0+i # Data indexing if LEFT in glb_pos_map[x]: assert(np.all(c.data[0] == [1., 1., 3., 6., 5.])) assert(np.all(c.data[1] == [2., 2., 4., 7., 6.])) else: assert(np.all(c.data[2] == [3., 3., 5., 8., 7.])) assert(np.all(c.data[3] == [4., 4., 6., 9., 8.])) # Same as before, but with negative indices and non-trivial slices if LEFT in glb_pos_map[x]: assert(np.all(c.data[0:-3] == [1., 1., 3., 6., 5.])) assert(np.all(c.data[-3:-2] == [2., 2., 4., 7., 6.])) else: assert(np.all(c.data[-2:-1] == [3., 3., 5., 8., 7.])) assert(np.all(c.data[-1] == [4., 4., 6., 9., 8.])) class TestDataGather(object): @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('rank', [0, 1, 2, 3]) def test_simple_gather(self, rank): """ Test a simple gather on various ranks.""" grid = Grid(shape=(10, 10), extent=(9, 9)) f = Function(name='f', grid=grid, dtype=np.int32) res = np.arange(100).reshape(grid.shape) f.data[:] = res myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(rank=rank) if myrank == rank: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('start, stop, step', [ (None, None, None), (None, None, 2), (None, None, -1), (None, None, -2), (1, 8, 3), ((0, 4), None, (2, 1))]) def test_sliced_gather_2D(self, start, stop, step): """ Test gather for various 2D slices.""" grid = Grid(shape=(10, 10), extent=(9, 9)) f = Function(name='f', grid=grid, 
dtype=np.int32) dat = np.arange(100).reshape(grid.shape) if isinstance(step, int) or step is None: step = [step for _ in grid.shape] if isinstance(start, int) or start is None: start = [start for _ in grid.shape] if isinstance(stop, int) or stop is None: stop = [stop for _ in grid.shape] idx = [] for i, j, k in zip(start, stop, step): idx.append(slice(i, j, k)) idx = tuple(idx) res = dat[idx] f.data[:] = dat myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(start=start, stop=stop, step=step) if myrank == 0: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('start, stop, step', [ (None, None, None), (None, None, 2), (None, None, -1), (None, None, -2), (1, 8, 3), ((0, 4, 4), None, (2, 1, 1))]) def test_sliced_gather_3D(self, start, stop, step): """ Test gather for various 3D slices.""" grid = Grid(shape=(10, 10, 10), extent=(9, 9, 9)) f = Function(name='f', grid=grid, dtype=np.int32) dat = np.arange(1000).reshape(grid.shape) if isinstance(step, int) or step is None: step = [step for _ in grid.shape] if isinstance(start, int) or start is None: start = [start for _ in grid.shape] if isinstance(stop, int) or stop is None: stop = [stop for _ in grid.shape] idx = [] for i, j, k in zip(start, stop, step): idx.append(slice(i, j, k)) idx = tuple(idx) res = dat[idx] f.data[:] = dat myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(start=start, stop=stop, step=step) if myrank == 0: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) def test_scalar_arg_substitution(): """ Tests the relaxed (compared to other devito sympy subclasses) substitution semantics for scalars, which is used for argument substitution into symbolic expressions. 
""" t0 = Scalar(name='t0').indexify() t1 = Scalar(name='t1').indexify() assert t0 != 0 assert t0.subs('t0', 2) == 2 assert t0.subs('t0', t1) == t1 @pytest.mark.skip(reason="will corrupt memory and risk crash") def test_oob_noguard(): """ Tests the guard page allocator. This writes to memory it shouldn't, and typically gets away with it. """ # A tiny grid grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0, allocator=ALLOC_FLAT) Operator(Eq(u[2000, 0], 1.0)).apply() @pytest.mark.skip(reason="will crash entire test suite") def test_oob_guard(): """ Tests the guard page allocator. This causes a segfault in the test suite, deliberately. """ # A tiny grid grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0, allocator=ALLOC_GUARD) Operator(Eq(u[2000, 0], 1.0)).apply() def test_numpy_c_contiguous(): """ Test that devito.Data is correctly reported by NumPy as being C-contiguous """ grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=2) assert(u._data_allocated.flags.c_contiguous) def test_external_allocator(): shape = (2, 2) space_order = 0 numpy_array = np.ones(shape, dtype=np.float32) g = Grid(shape) f = Function(name='f', space_order=space_order, grid=g, allocator=ExternalAllocator(numpy_array), initializer=lambda x: None) # Ensure the two arrays have the same value assert(np.array_equal(f.data, numpy_array)) # Ensure the original numpy array is unchanged assert(np.array_equal(numpy_array, np.ones(shape, dtype=np.float32))) # Change the underlying numpy array numpy_array[:] = 3. # Ensure the function.data changes too assert(np.array_equal(f.data, numpy_array)) # Change the function.data f.data[:] = 4. # Ensure the underlying numpy array changes too assert(np.array_equal(f.data, numpy_array)) if __name__ == "__main__": configuration['mpi'] = True TestDataDistributed().test_misc_data()
opesci/devito
tests/test_data.py
devito/ir/equations/__init__.py
"""
Cluster-level passes introducing synchronization operations (SyncOps) for
asynchronous task execution and data streaming.
"""

from collections import defaultdict

from sympy import Mod, Mul

from devito.exceptions import InvalidOperator
from devito.ir.clusters import Queue
from devito.ir.support import Forward, SEQUENTIAL
from devito.tools import (DefaultOrderedDict, frozendict, is_integer,
                          indices_to_sections, timed_pass)
from devito.types import (CustomDimension, Ge, Le, Lock, WaitLock, WithLock,
                          FetchWait, FetchWaitPrefetch, Delete, normalize_syncs)

__all__ = ['Tasker', 'Streaming']


class Asynchronous(Queue):

    """
    Abstract base for Queue passes that tag Clusters with SyncOps.

    Parameters
    ----------
    key : callable
        A Cluster -> bool (or Cluster -> candidates) selector; subclasses
        define its exact meaning.
    """

    def __init__(self, key):
        assert callable(key)
        self.key = key
        super().__init__()


class Tasker(Asynchronous):

    """
    Create asynchronous Clusters, or "tasks".

    Parameters
    ----------
    key : callable, optional
        A Cluster `c` becomes an asynchronous task only if `key(c)`
        returns True

    Notes
    -----
    From an implementation viewpoint, an asynchronous Cluster is a Cluster
    with attached suitable SyncOps, such as WaitLock, WithLock, etc.
    """

    @timed_pass(name='tasker')
    def process(self, clusters):
        return super().process(clusters)

    def callback(self, clusters, prefix):
        if not prefix:
            return clusters

        d = prefix[-1].dim

        # Tasking is only attempted along a sequential Dimension
        if not all(SEQUENTIAL in c.properties[d] for c in clusters):
            return clusters

        locks = {}
        waits = defaultdict(list)
        tasks = defaultdict(list)
        for c0 in clusters:
            if not self.key(c0):
                # Not a candidate asynchronous task
                continue

            # Prevent future writes to interfere with a task by waiting on a lock
            may_require_lock = set(c0.scope.reads)

            # Sort for deterministic code generation
            may_require_lock = sorted(may_require_lock, key=lambda i: i.name)

            protected = defaultdict(set)
            for c1 in clusters:
                # `offset == 1` iff `c1` precedes (or is) `c0` in program order
                offset = int(clusters.index(c1) <= clusters.index(c0))

                for f in may_require_lock:
                    try:
                        writes = c1.scope.writes[f]
                    except KeyError:
                        # No read-write dependency, ignore
                        continue

                    try:
                        if all(w.aindices[d].is_Stepping for w in writes) or \
                           all(w.aindices[d].is_Modulo for w in writes):
                            size = f.shape_allocated[d]
                            assert is_integer(size)
                            ld = CustomDimension(name='ld', symbolic_size=size,
                                                 parent=d)
                        elif all(w[d] == 0 for w in writes):
                            # Special case, degenerates to scalar lock
                            raise KeyError
                        else:
                            # Functions over non-stepping Dimensions need no lock
                            continue
                    except KeyError:
                        # Would degenerate to a scalar, but we rather use a lock
                        # of size 1 for simplicity
                        ld = CustomDimension(name='ld', symbolic_size=1)

                    lock = locks.setdefault(f, Lock(name='lock%d' % len(locks),
                                                    dimensions=ld, target=f))

                    for w in writes:
                        try:
                            index = w[d]
                            logical_index = index + offset
                        except TypeError:
                            # Scalar (size-1) lock
                            assert ld.symbolic_size == 1
                            index = 0
                            logical_index = 0

                        if logical_index in protected[f]:
                            continue

                        waits[c1].append(WaitLock(lock[index]))
                        protected[f].add(logical_index)

            # Taskify `c0`
            for f in protected:
                lock = locks[f]

                indices = sorted({r[d] for r in c0.scope.reads[f]})
                if indices == [None]:
                    # `lock` is protecting a Function which isn't defined over `d`
                    # E.g., `d=time` and the protected function is `a(x, y)`
                    assert lock.size == 1
                    indices = [0]

                tasks[c0].extend(WithLock(lock[i]) for i in indices)

        processed = []
        for c in clusters:
            if waits[c] or tasks[c]:
                processed.append(c.rebuild(syncs={d: waits[c] + tasks[c]}))
            else:
                processed.append(c)

        return processed


class Streaming(Asynchronous):

    """
    Tag Clusters with the FetchWait, FetchWaitPrefetch and Delete SyncOps
    to stream Functions in and out the process memory.

    Parameters
    ----------
    key : callable, optional
        Return the Functions that need to be streamed in a given Cluster.
    """

    @timed_pass(name='streaming')
    def process(self, clusters):
        return super().process(clusters)

    def callback(self, clusters, prefix):
        if not prefix:
            return clusters

        it = prefix[-1]
        d = it.dim
        direction = it.direction

        try:
            pd = prefix[-2].dim
        except IndexError:
            pd = None

        # What are the stream-able Dimensions?
        # 0) all sequential Dimensions
        # 1) all CustomDimensions of fixed (i.e. integer) size, which
        #    implies a bound on the amount of streamed data
        if all(SEQUENTIAL in c.properties[d] for c in clusters):
            make_fetch = lambda f, i, s, cb: \
                FetchWaitPrefetch(f, d, direction, i, s, cb)
            make_delete = lambda f, i, s, cb: Delete(f, d, direction, i, s, cb)
            syncd = d
        elif d.is_Custom and is_integer(it.size):
            make_fetch = lambda f, i, s, cb: \
                FetchWait(f, d, direction, i, it.size, cb)
            make_delete = lambda f, i, s, cb: \
                Delete(f, d, direction, i, it.size, cb)
            syncd = pd
        else:
            return clusters

        # Record, for each streamed (Function, index) pair, the first and
        # last Cluster accessing it
        first_seen = {}
        last_seen = {}
        for c in clusters:
            candidates = self.key(c)
            if not candidates:
                continue
            for i in c.scope.accesses:
                f = i.function
                if f in candidates:
                    k = (f, i[d])
                    first_seen.setdefault(k, c)
                    last_seen[k] = c
        if not first_seen:
            return clusters

        # Bind fetches and deletes to Clusters
        sync_ops = defaultdict(list)
        callbacks = [(frozendict(first_seen), make_fetch),
                     (frozendict(last_seen), make_delete)]
        for seen, callback in callbacks:
            mapper = defaultdict(lambda: DefaultOrderedDict(list))
            for (f, v), c in seen.items():
                mapper[c][f].append(v)
            for c, m in mapper.items():
                for f, v in m.items():
                    for i, s in indices_to_sections(v):
                        next_cbk = make_next_cbk(c.guards.get(d), d, direction)
                        sync_ops[c].append(callback(f, i, s, next_cbk))

        # Attach SyncOps to Clusters
        processed = []
        for c in clusters:
            v = sync_ops.get(c)
            if v is not None:
                processed.append(c.rebuild(syncs=normalize_syncs(c.syncs,
                                                                 {syncd: v})))
            else:
                processed.append(c)

        return processed


# Utilities


def make_next_cbk(rel, d, direction):
    """
    Create a callable that given a symbol returns a sympy.Relational usable
    to express, in symbolic form, whether the next fetch/prefetch will be
    executed.
    """
    if rel is None:
        if direction is Forward:
            return lambda s: Le(s, d.symbolic_max)
        else:
            return lambda s: Ge(s, d.symbolic_min)
    else:
        # Only case we know how to deal with, today, is the one induced
        # by a ConditionalDimension with structured condition (e.g. via `factor`)
        if not (rel.is_Equality and rel.rhs == 0 and isinstance(rel.lhs, Mod)):
            raise InvalidOperator("Unable to understand data streaming pattern")
        _, v = rel.lhs.args
        if direction is Forward:
            # The LHS rounds `s` up to the nearest multiple of `v`
            return lambda s: Le(Mul(((s + v - 1) / v), v, evaluate=False),
                                d.symbolic_max)
        else:
            # The LHS rounds `s` down to the nearest multiple of `v`
            return lambda s: Ge(Mul((s / v), v, evaluate=False),
                                d.symbolic_min)
import pytest
import numpy as np

from devito import (Grid, Function, TimeFunction, SparseTimeFunction, Dimension, # noqa
                    Eq, Operator, ALLOC_GUARD, ALLOC_FLAT, configuration,
                    switchconfig)
from devito.data import LEFT, RIGHT, Decomposition, loc_data_idx, convert_index
from devito.tools import as_tuple
from devito.types import Scalar
from devito.data.allocators import ExternalAllocator


class TestDataBasic(object):

    def test_simple_indexing(self):
        """Check data packing/unpacking through basic indexing."""
        grid = Grid(shape=(16, 16, 16))
        fn = Function(name='yu3D', grid=grid, space_order=0)

        # Single-point insertion and extraction
        fn.data[0, 1, 1] = 1.
        assert fn.data[0, 0, 0] == 0.
        assert fn.data[0, 1, 1] == 1.
        assert np.all(fn.data == fn.data[:, :, :])
        assert 1. in fn.data[0]
        assert 1. in fn.data[0, 1]

        # Negative indices address the same entries as their positive twins
        assert fn.data[0, -15, -15] == 1.
        fn.data[6, 0, 0] = 1.
        assert fn.data[-10, :, :].sum() == 1.

        # Whole-array fill with a scalar
        fn.data[:] = 3.
        assert np.all(fn.data == 3.)

        # Scalar broadcast into a 1D slab
        fn.data[5, :, 5] = 5.
        assert np.all(fn.data[5, :, 5] == 5.)

        # Slab extraction via negative indices
        view = fn.data[-11, :, -11]
        assert view.shape == (16,)
        assert np.all(view == 5.)

        # Block insertion, then read back through the block itself
        patch = np.full((1, 16, 1), 4., dtype=np.float32)
        fn.data[4:5, :, 4:5] = patch
        assert np.all(fn.data[4, :, 4] == patch)

    def test_advanced_indexing(self):
        """Check data packing/unpacking through advanced indexing."""
        grid = Grid(shape=(4, 4, 4))
        fn = TimeFunction(name='yu4D', grid=grid, space_order=0, time_order=1)
        fn.data[:] = 0.

        # Negative-index slicing combined with explicit indexing; only the
        # interior of timestep 1 gets written
        fn.data[1, 1:-1, 1:-1, 1:-1] = 6.
        assert np.all(fn.data[0] == 0.)
        assert np.all(fn.data[1, 1:-1, 1:-1, 1:-1] == 6.)
        for boundary in (0, -1):
            assert np.all(fn.data[1, :, boundary] == 0.)
            assert np.all(fn.data[1, :, :, boundary] == 0.)
def test_negative_step(self): """Test slicing with a negative step.""" grid = Grid(shape=(6, 6, 6)) u = TimeFunction(name='u', grid=grid, dtype=np.int32) u.data[:] = 0. dat = np.array([1, 2, 3, 4, 5, 6]) u.data[0, :, 0, 0] = dat assert (np.array(u.data[0, 3::-1, 0, 0]) == dat[3::-1]).all() assert (np.array(u.data[0, 5:1:-1, 0, 0]) == dat[5:1:-1]).all() def test_negative_start(self): """Test slicing with a negative start.""" grid = Grid(shape=(13,)) f = Function(name='f', grid=grid) idx = slice(-4, None, 1) dat = np.array([1, 2, 3, 4]) f.data[idx] = dat assert np.all(np.array(f.data[9:]) == dat) def test_halo_indexing(self): """Test data packing/unpacking in presence of a halo region.""" domain_shape = (16, 16, 16) grid = Grid(shape=domain_shape) u = Function(name='yu3D', grid=grid, space_order=2) assert u.shape == u.data.shape == domain_shape assert u._shape_with_inhalo == u.data_with_halo.shape == (20, 20, 20) assert u.shape_with_halo == u._shape_with_inhalo # W/o MPI, these two coincide # Test simple insertion and extraction u.data_with_halo[0, 0, 0] = 1. u.data[0, 0, 0] = 2. assert u.data_with_halo[0, 0, 0] == 1. assert u.data[0, 0, 0] == 2. assert u.data_with_halo[2, 2, 2] == 2. # Test negative indices u.data_with_halo[-1, -1, -1] = 3. assert u.data[-1, -1, -1] == 0. assert u.data_with_halo[-1, -1, -1] == 3. def test_broadcasting(self): """ Test Data broadcasting, expected to behave as NumPy broadcasting. Notes ----- Refer to https://docs.scipy.org/doc/numpy-1.15.0/user/basics.broadcasting.html for more info about NumPy broadcasting rules. """ grid = Grid(shape=(4, 4, 4)) u = Function(name='yu3D', grid=grid) u.data[:] = 2. # Assign from array with lower-dimensional shape v = np.ones(shape=(4, 4), dtype=u.dtype) u.data[:] = v assert np.all(u.data == 1.) 
# Assign from array with higher-dimensional shape causes a ValueError exception v = np.zeros(shape=(4, 4, 4, 4), dtype=u.dtype) try: u.data[:] = v except ValueError: assert True except: assert False # Assign from array having shape with some 1-valued entries v = np.zeros(shape=(4, 1, 4), dtype=u.dtype) u.data[:] = v assert np.all(u.data == 0.) def test_arithmetic(self): """Test arithmetic operations involving Data objects.""" grid = Grid(shape=(16, 16, 16)) u = Function(name='yu3D', grid=grid, space_order=0) u.data[:] = 1 # Simple arithmetic assert np.all(u.data == 1) assert np.all(u.data + 2. == 3.) assert np.all(u.data - 2. == -1.) assert np.all(u.data * 2. == 2.) assert np.all(u.data / 2. == 0.5) assert np.all(u.data % 2 == 1.) # Increments and partial increments u.data[:] += 2. assert np.all(u.data == 3.) u.data[9, :, :] += 1. assert all(np.all(u.data[i, :, :] == 3.) for i in range(9)) assert np.all(u.data[9, :, :] == 4.) # Right operations __rOP__ u.data[:] = 1. arr = np.ndarray(shape=(16, 16, 16), dtype=np.float32) arr.fill(2.) assert np.all(arr - u.data == 1.) def test_illegal_indexing(self): """Tests that indexing into illegal entries throws an exception.""" nt = 5 grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid) v = TimeFunction(name='v', grid=grid, save=nt) try: u.data[5] assert False except IndexError: pass try: v.data[nt] assert False except IndexError: pass def test_logic_indexing(self): """Test logic indexing along stepping dimensions.""" grid = Grid(shape=(4, 4, 4)) v_mod = TimeFunction(name='v_mod', grid=grid) v_mod.data[0] = 1. v_mod.data[1] = 2. assert np.all(v_mod.data[0] == 1.) assert np.all(v_mod.data[1] == 2.) assert np.all(v_mod.data[2] == v_mod.data[0]) assert np.all(v_mod.data[4] == v_mod.data[0]) assert np.all(v_mod.data[3] == v_mod.data[1]) assert np.all(v_mod.data[-1] == v_mod.data[1]) assert np.all(v_mod.data[-2] == v_mod.data[0]) def test_indexing_into_sparse(self): """ Test indexing into SparseFunctions. 
""" grid = Grid(shape=(4, 4)) sf = SparseTimeFunction(name='sf', grid=grid, npoint=1, nt=10) sf.data[1:-1, 0] = np.arange(8) assert np.all(sf.data[1:-1, 0] == np.arange(8)) class TestLocDataIDX(object): """ Test the support function loc_data_idx. """ @pytest.mark.parametrize('idx, expected', [ ('(slice(10, None, -1), slice(11, None, -3))', '(slice(0, 11, 1), slice(2, 12, 3))'), ('(2, 5)', '(slice(2, 3, 1), slice(5, 6, 1))') ]) def test_loc_data_idx(self, idx, expected): """ Test loc_data_idx located in devito/data/utils.py """ idx = eval(idx) expected = eval(expected) result = loc_data_idx(idx) assert result == expected class TestMetaData(object): """ Test correctness of metadata describing size and offset of the various data regions, such as DOMAIN, HALO, etc. """ def test_wo_halo_wo_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=0, padding=0) assert u.shape == u._shape_with_inhalo == u.shape_allocated assert u.shape_with_halo == u._shape_with_inhalo # W/o MPI, these two coincide assert u._size_halo == u._size_owned == u._size_padding ==\ ((0, 0), (0, 0), (0, 0)) assert u._offset_domain == (0, 0, 0) assert u._offset_halo == u._offset_owned == ((0, 4), (0, 4), (0, 4)) def test_w_halo_wo_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=2, padding=0) assert len(u.shape) == len(u._size_halo.left) assert u._size_halo == u._size_owned == ((2, 2), (2, 2), (2, 2)) assert u._offset_domain == (2, 2, 2) assert u._offset_halo == ((0, 6), (0, 6), (0, 6)) assert u._offset_owned == ((2, 4), (2, 4), (2, 4)) assert tuple(i + j*2 for i, j in zip(u.shape, u._size_halo.left)) ==\ u.shape_with_halo # Try with different grid shape and space_order grid2 = Grid(shape=(3, 3, 3)) u2 = Function(name='u2', grid=grid2, space_order=4, padding=0) assert u2.shape == (3, 3, 3) assert u2._offset_domain == (4, 4, 4) assert u2._offset_halo == ((0, 7), (0, 7), (0, 7)) assert tuple(i + j*2 for i, j in zip(u2.shape, 
u2._size_halo.left)) ==\ u2.shape_with_halo assert u2.shape_with_halo == (11, 11, 11) def test_wo_halo_w_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=2, padding=((1, 1), (3, 3), (4, 4))) assert tuple(i + j + k for i, (j, k) in zip(u.shape_with_halo, u._padding)) ==\ u.shape_allocated assert u._halo == ((2, 2), (2, 2), (2, 2)) assert u._size_padding == ((1, 1), (3, 3), (4, 4)) assert u._size_padding.left == u._size_padding.right == (1, 3, 4) assert u._size_nodomain == ((3, 3), (5, 5), (6, 6)) assert u._size_nodomain.left == u._size_nodomain.right == (3, 5, 6) assert u._size_nopad == (8, 8, 8) assert u._offset_domain == (3, 5, 6) assert u._offset_halo == ((1, 7), (3, 9), (4, 10)) assert u._offset_halo.left == (1, 3, 4) assert u._offset_halo.right == (7, 9, 10) assert u._offset_owned == ((3, 5), (5, 7), (6, 8)) def test_w_halo_w_padding(self): grid = Grid(shape=(4, 4, 4)) u = Function(name='u', grid=grid, space_order=(2, 1, 4), padding=((1, 1), (2, 2), (3, 3))) assert u._size_halo == ((1, 4), (1, 4), (1, 4)) assert u._size_owned == ((4, 1), (4, 1), (4, 1)) assert u._size_nodomain == ((2, 5), (3, 6), (4, 7)) assert u._size_nodomain.left == (2, 3, 4) assert u._size_nodomain.right == (5, 6, 7) assert u._size_nopad == (9, 9, 9) assert u._offset_domain == (2, 3, 4) assert u._offset_halo == ((1, 6), (2, 7), (3, 8)) assert u._offset_owned == ((2, 5), (3, 6), (4, 7)) @switchconfig(autopadding=True, platform='bdw') # Platform is to fix pad value def test_w_halo_w_autopadding(self): grid = Grid(shape=(4, 4, 4)) u0 = Function(name='u0', grid=grid, space_order=0) u1 = Function(name='u1', grid=grid, space_order=3) assert configuration['platform'].simd_items_per_reg(u1.dtype) == 8 assert u0._size_halo == ((0, 0), (0, 0), (0, 0)) assert u0._size_padding == ((0, 0), (0, 0), (0, 12)) assert u0._size_nodomain == u0._size_padding assert u0.shape_allocated == (4, 4, 16) assert u1._size_halo == ((3, 3), (3, 3), (3, 3)) assert u1._size_padding 
== ((0, 0), (0, 0), (0, 14)) # 14 stems from 6 + 8 assert u1._size_nodomain == ((3, 3), (3, 3), (3, 17)) assert u1.shape_allocated == (10, 10, 24) class TestDecomposition(object): """ Notes ----- If these tests don't work, there is no chance that the tests in TestDataDistributed will pass. """ def test_glb_to_loc_index_conversions(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # A global index as single argument assert d.index_glb_to_loc(5) == 0 assert d.index_glb_to_loc(6) == 1 assert d.index_glb_to_loc(7) == 2 assert d.index_glb_to_loc(3) is None # Retrieve relative local min/man given global min/max assert d.index_glb_to_loc((5, 7)) == (0, 2) assert d.index_glb_to_loc((5, 9)) == (0, 2) assert d.index_glb_to_loc((1, 3)) == (-1, -3) assert d.index_glb_to_loc((1, 6)) == (0, 1) assert d.index_glb_to_loc((None, None)) == (0, 2) # Retrieve absolute local min/man given global min/max assert d.index_glb_to_loc((5, 7), rel=False) == (5, 7) assert d.index_glb_to_loc((5, 9), rel=False) == (5, 7) assert d.index_glb_to_loc((1, 3), rel=False) == (-1, -3) assert d.index_glb_to_loc((1, 6), rel=False) == (5, 6) assert d.index_glb_to_loc((None, None), rel=False) == (5, 7) def test_glb_to_loc_w_side(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # A global index as single argument assert d.index_glb_to_loc(5, LEFT) == 0 assert d.index_glb_to_loc(6, RIGHT) == 2 assert d.index_glb_to_loc(7, LEFT) == 2 assert d.index_glb_to_loc(4, RIGHT) == 0 assert d.index_glb_to_loc(6, LEFT) == 1 assert d.index_glb_to_loc(5, RIGHT) == 1 assert d.index_glb_to_loc(2, LEFT) is None assert d.index_glb_to_loc(3, RIGHT) is None def test_loc_to_glb_index_conversions(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) # Convert local indices to global indices assert d.index_loc_to_glb((0, 2)) == (5, 7) d2 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 0) assert d2.index_loc_to_glb((0, 2)) == (0, 2) d3 = 
Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 3) assert d3.index_loc_to_glb((1, 3)) == (9, 11) def test_convert_index(self): d0 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) d1 = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 3) decomposition = (d0, d1) idx0 = (5, slice(8, 11, 1)) result0 = [] for i, j in zip(idx0, decomposition): result0.append(convert_index(i, j)) expected0 = (0, slice(0, 3, 1)) assert as_tuple(result0) == expected0 def test_reshape_identity(self): d = Decomposition([[0, 1], [2, 3]], 2) # Identity decomposition assert len(d.reshape(0, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, 0), [[0, 1], [2, 3]])) def test_reshape_right_only(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at right only assert len(d.reshape(0, 2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, 2), [[0, 1], [2, 3, 4, 5]])) # Reduction at right affecting one sub-domain only, but not the whole subdomain assert len(d.reshape(0, -1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -1), [[0, 1], [2]])) # Reduction at right over one whole sub-domain assert len(d.reshape(0, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -2), [[0, 1], []])) # Reduction at right over multiple sub-domains assert len(d.reshape(0, -3)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(0, -3), [[0], []])) def test_reshape_left_only(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at left only assert len(d.reshape(2, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(2, 0), [[0, 1, 2, 3], [4, 5]])) # Reduction at left affecting one sub-domain only, but not the whole subdomain assert len(d.reshape(-1, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, 0), [[0], [1, 2]])) # Reduction at left over one whole sub-domain assert len(d.reshape(-2, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-2, 0), [[], [0, 1]])) # Reduction at right over multiple 
sub-domains assert len(d.reshape(-3, 0)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-3, 0), [[], [0]])) def test_reshape_left_right(self): d = Decomposition([[0, 1], [2, 3]], 2) # Extension at both left and right assert len(d.reshape(1, 1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(1, 1), [[0, 1, 2], [3, 4, 5]])) # Reduction at both left and right assert len(d.reshape(-1, -1)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -1), [[0], [1]])) # Reduction at both left and right, with the right one obliterating one subdomain assert len(d.reshape(-1, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -2), [[0], []])) # Reduction at both left and right obliterating all subdomains # triggering an exception assert len(d.reshape(-1, -3)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -3), [[], []])) assert len(d.reshape(-2, -2)) == 2 assert all(list(i) == j for i, j in zip(d.reshape(-1, -3), [[], []])) def test_reshape_slice(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) assert d.reshape(slice(None)) == d assert d.reshape(slice(2, 9)) == Decomposition([[0], [1, 2], [3, 4, 5], [6]], 2) assert d.reshape(slice(3, 5)) == Decomposition([[], [0, 1], [], []], 2) assert d.reshape(slice(3, 3)) == Decomposition([[], [], [], []], 2) assert d.reshape(slice(13, 13)) == Decomposition([[], [], [], []], 2) assert d.reshape(slice(2, None)) == Decomposition([[0], [1, 2], [3, 4, 5], [6, 7, 8, 9]], 2) assert d.reshape(slice(4)) == Decomposition([[0, 1, 2], [3], [], []], 2) assert d.reshape(slice(-2, 2)) == Decomposition([[0, 1, 2, 3], [], [], []], 2) assert d.reshape(slice(-2)) == Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9]], 2) assert d.reshape(slice(3, -1)) == Decomposition([[], [0, 1], [2, 3, 4], [5, 6, 7]], 2) def test_reshape_iterable(self): d = Decomposition([[0, 1, 2], [3, 4], [5, 6, 7], [8, 9, 10, 11]], 2) assert d.reshape(()) == Decomposition([[], [], [], []], 2) assert d.reshape((1, 3, 
5)) == Decomposition([[0], [1], [2], []], 2) assert d.reshape((1, 3, 10, 11)) == Decomposition([[0], [1], [], [2, 3]], 2) assert d.reshape((1, 3, 10, 11, 14)) == Decomposition([[0], [1], [], [2, 3]], 2) class TestDataDistributed(object): """ Test Data indexing and manipulation when distributed over a set of MPI processes. """ @pytest.mark.parallel(mode=4) def test_localviews(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid) u.data[:] = grid.distributor.myrank assert u.data_ro_domain._local[0, 0] == grid.distributor.myrank assert u.data_ro_domain._local[1, 1] == grid.distributor.myrank assert u.data_ro_domain._local[-1, -1] == grid.distributor.myrank assert u.data_ro_with_halo._local[1, 1] == grid.distributor.myrank if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[1:, 1:] == myrank) assert np.all(u.data_ro_with_halo._local[0] == 0.) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[1:3, :2] == myrank) assert np.all(u.data_ro_with_halo._local[0] == 0.) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data_ro_with_halo._local[:2, 1:3] == myrank) assert np.all(u.data_ro_with_halo._local[2] == 0.) else: assert np.all(u.data_ro_with_halo._local[:2, :2] == myrank) assert np.all(u.data_ro_with_halo._local[2] == 0.) @pytest.mark.parallel(mode=4) def test_trivial_insertion(self): grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0) v = Function(name='v', grid=grid, space_order=1) u.data[:] = 1. assert np.all(u.data == 1.) assert np.all(u.data._local == 1.) v.data_with_halo[:] = 1. assert v.data_with_halo[:].shape == (3, 3) assert np.all(v.data_with_halo == 1.) assert np.all(v.data_with_halo[:] == 1.) assert np.all(v.data_with_halo._local == 1.) 
@pytest.mark.parallel(mode=4) def test_indexing(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert u.data[0, 0] == myrank assert u.data[2, 2] is None assert u.data[2].size == 0 assert u.data[:, 2].size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert u.data[0, 0] is None assert u.data[2, 2] is None assert u.data[2].size == 0 assert np.all(u.data[:, 2] == [myrank, myrank]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert u.data[0, 0] is None assert u.data[2, 2] is None assert np.all(u.data[2] == [myrank, myrank]) assert u.data[:, 2].size == 0 else: assert u.data[0, 0] is None assert u.data[2, 2] == myrank assert np.all(u.data[2] == [myrank, myrank]) assert np.all(u.data[:, 2] == [myrank, myrank]) @pytest.mark.parallel(mode=4) def test_slicing(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank # `u.data` is a view of the global data array restricted, on each rank, # to the local rank domain, so it must be == myrank assert np.all(u.data == myrank) assert np.all(u.data._local == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data[:2, :2] == myrank) assert u.data[:2, 2:].size == u.data[2:, :2].size == u.data[2:, 2:].size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data[:2, 2:] == myrank) assert u.data[:2, :2].size == u.data[2:, :2].size == u.data[2:, 2:].size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data[2:, :2] == myrank) assert u.data[:2, 2:].size == u.data[:2, :2].size == u.data[2:, 2:].size == 0 else: assert np.all(u.data[2:, 2:] == myrank) assert u.data[:2, 2:].size == 
u.data[2:, :2].size == u.data[:2, :2].size == 0 @pytest.mark.parallel(mode=4) def test_slicing_ns(self): # Test slicing with a negative step grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank dat = np.arange(16, dtype=np.int32) dat = dat.reshape(grid.shape) u.data[::-1, ::-1] = dat if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[15, 14], [11, 10]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[13, 12], [9, 8]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[7, 6], [3, 2]]) else: assert np.all(u.data == [[5, 4], [1, 0]]) @pytest.mark.parallel(mode=4) def test_getitem(self): # __getitem__ mpi slicing tests: grid = Grid(shape=(8, 8)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map f = Function(name='f', grid=grid, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid.shape) f.data[:] = a result = np.array(f.data[::-1, ::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result[0] == [[63, 62, 61, 60]]) assert np.all(result[1] == [[55, 54, 53, 52]]) assert np.all(result[2] == [[47, 46, 45, 44]]) assert np.all(result[3] == [[39, 38, 37, 36]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(result[0] == [[59, 58, 57, 56]]) assert np.all(result[1] == [[51, 50, 49, 48]]) assert np.all(result[2] == [[43, 42, 41, 40]]) assert np.all(result[3] == [[35, 34, 33, 32]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result[0] == [[31, 30, 29, 28]]) assert np.all(result[1] == [[23, 22, 21, 20]]) assert np.all(result[2] == [[15, 14, 13, 12]]) assert np.all(result[3] == [[7, 6, 5, 4]]) else: assert np.all(result[0] == [[27, 26, 25, 24]]) assert np.all(result[1] == [[19, 18, 17, 16]]) assert np.all(result[2] == 
[[11, 10, 9, 8]]) assert np.all(result[3] == [[3, 2, 1, 0]]) result1 = np.array(f.data[5, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result1.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result1.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result1 == [[46, 45]]) else: assert np.all(result1 == [[44, 43, 42]]) result2 = np.array(f.data[6:4:-1, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result2.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result2.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result2[0] == [[54, 53]]) assert np.all(result2[1] == [[46, 45]]) else: assert np.all(result2[0] == [[52, 51, 50]]) assert np.all(result2[1] == [[44, 43, 42]]) result3 = np.array(f.data[6:4:-1, 2:7]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert result3.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert result3.size == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result3[0] == [[50, 51]]) assert np.all(result3[1] == [[42, 43]]) else: assert np.all(result3[0] == [[52, 53, 54]]) assert np.all(result3[1] == [[44, 45, 46]]) result4 = np.array(f.data[4:2:-1, 6:1:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result4 == [[38, 37]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(result4 == [[36, 35, 34]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(result4 == [[30, 29]]) else: assert np.all(result4 == [[28, 27, 26]]) @pytest.mark.parallel(mode=4) def test_big_steps(self): # Test slicing with a step size > 1 grid = Grid(shape=(8, 8)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map f = Function(name='f', grid=grid, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid.shape) f.data[:] = a r0 = 
np.array(f.data[::3, ::3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r0 == [[0, 3], [24, 27]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r0 == [[6], [30]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r0 == [[48, 51]]) else: assert np.all(r0 == [[54]]) r1 = np.array(f.data[1::3, 1::3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r1 == [[9]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r1 == [[12, 15]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r1 == [[33], [57]]) else: assert np.all(r1 == [[36, 39], [60, 63]]) r2 = np.array(f.data[::-3, ::-3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r2 == [[63]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r2 == [[60, 57]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r2 == [[39], [15]]) else: assert np.all(r2 == [[36, 33], [12, 9]]) r3 = np.array(f.data[6::-3, 6::-3]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r3 == [[54, 51], [30, 27]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(r3 == [[48], [24]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(r3 == [[6, 3]]) else: assert np.all(r3 == [[0]]) @pytest.mark.parallel(mode=4) def test_setitem(self): # __setitem__ mpi slicing tests grid = Grid(shape=(12, 12)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map g = Function(name='g', grid=grid, space_order=0, dtype=np.int32) h = Function(name='h', grid=grid, space_order=0, dtype=np.int32) grid1 = Grid(shape=(8, 8)) f = Function(name='f', grid=grid1, space_order=0, dtype=np.int32) test_dat = np.arange(64, dtype=np.int32) a = test_dat.reshape(grid1.shape) f.data[:] = a g.data[0, 0:3] = f.data[7, 4:7] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(g.data) == [[60, 61, 62, 0, 0, 0], 
[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(np.array(g.data)) == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(g.data)) == 0 else: assert np.all(np.array(g.data)) == 0 h.data[2:10, 2:10] = f.data[::-1, ::-1] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 63, 62, 61, 60], [0, 0, 55, 54, 53, 52], [0, 0, 47, 46, 45, 44], [0, 0, 39, 38, 37, 36]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [59, 58, 57, 56, 0, 0], [51, 50, 49, 48, 0, 0], [43, 42, 41, 40, 0, 0], [35, 34, 33, 32, 0, 0]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 31, 30, 29, 28], [0, 0, 23, 22, 21, 20], [0, 0, 15, 14, 13, 12], [0, 0, 7, 6, 5, 4], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.all(np.array(h.data) == [[27, 26, 25, 24, 0, 0], [19, 18, 17, 16, 0, 0], [11, 10, 9, 8, 0, 0], [3, 2, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) @pytest.mark.parallel(mode=4) def test_hd_slicing(self): # Test higher dimension slices grid = Grid(shape=(4, 4, 4)) x, y, z = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map t = Function(name='t', grid=grid, space_order=0) dat = np.arange(64, dtype=np.int32) b = dat.reshape(grid.shape) t.data[:] = b c = np.array(t.data[::-1, ::-1, ::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[63, 59], [47, 43]]) assert np.all(c[:, :, 3] == [[60, 56], [44, 40]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[55, 51], [39, 35]]) assert np.all(c[:, :, 3] == [[52, 48], [36, 32]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(c[:, :, 0] == [[31, 27], [15, 11]]) 
assert np.all(c[:, :, 3] == [[28, 24], [12, 8]]) else: assert np.all(c[:, :, 0] == [[23, 19], [7, 3]]) assert np.all(c[:, :, 3] == [[20, 16], [4, 0]]) d = np.array(t.data[::-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[49, 53], [33, 37]]) assert np.all(d[:, :, 2] == [[50, 54], [34, 38]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[57, 61], [41, 45]]) assert np.all(d[:, :, 2] == [[58, 62], [42, 46]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(d[:, :, 1] == [[17, 21], [1, 5]]) assert np.all(d[:, :, 2] == [[18, 22], [2, 6]]) else: assert np.all(d[:, :, 1] == [[25, 29], [9, 13]]) assert np.all(d[:, :, 2] == [[26, 30], [10, 14]]) e = np.array(t.data[:, 3:2:-1]) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert e.size == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(e[:, :, 0] == [[12], [28]]) assert np.all(e[:, :, 1] == [[13], [29]]) assert np.all(e[:, :, 2] == [[14], [30]]) assert np.all(e[:, :, 3] == [[15], [31]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert e.size == 0 else: assert np.all(e[:, :, 0] == [[44], [60]]) assert np.all(e[:, :, 1] == [[45], [61]]) assert np.all(e[:, :, 2] == [[46], [62]]) assert np.all(e[:, :, 3] == [[47], [63]]) @pytest.mark.parallel(mode=4) def test_niche_slicing(self): grid0 = Grid(shape=(8, 8)) x0, y0 = grid0.dimensions glb_pos_map0 = grid0.distributor.glb_pos_map f = Function(name='f', grid=grid0, space_order=0, dtype=np.int32) dat = np.arange(64, dtype=np.int32) a = dat.reshape(grid0.shape) f.data[:] = a grid1 = Grid(shape=(12, 12)) x1, y1 = grid1.dimensions glb_pos_map1 = grid1.distributor.glb_pos_map h = Function(name='h', grid=grid1, space_order=0, dtype=np.int32) grid2 = Grid(shape=(4, 4, 4)) t = Function(name='t', grid=grid2, space_order=0) b = dat.reshape(grid2.shape) t.data[:] = b tdat0 = np.array(f.data[-2::, -2::]) if LEFT in glb_pos_map0[x0] and LEFT in 
glb_pos_map0[y0]: assert tdat0.size == 0 elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert tdat0.size == 0 elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert tdat0.size == 0 else: assert np.all(tdat0 == [[54, 55], [62, 63]]) h.data[9:1:-1, 9:1:-1] = f.data[:, :] if LEFT in glb_pos_map1[x1] and LEFT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 63, 62, 61, 60], [0, 0, 55, 54, 53, 52], [0, 0, 47, 46, 45, 44], [0, 0, 39, 38, 37, 36]]) elif LEFT in glb_pos_map1[x1] and RIGHT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [59, 58, 57, 56, 0, 0], [51, 50, 49, 48, 0, 0], [43, 42, 41, 40, 0, 0], [35, 34, 33, 32, 0, 0]]) elif RIGHT in glb_pos_map1[x1] and LEFT in glb_pos_map1[y1]: assert np.all(np.array(h.data) == [[0, 0, 31, 30, 29, 28], [0, 0, 23, 22, 21, 20], [0, 0, 15, 14, 13, 12], [0, 0, 7, 6, 5, 4], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.all(np.array(h.data) == [[27, 26, 25, 24, 0, 0], [19, 18, 17, 16, 0, 0], [11, 10, 9, 8, 0, 0], [3, 2, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) f.data[:] = 0 f.data[::2, ::2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 4, 0], [0, 0, 0, 0], [16, 0, 20, 0], [0, 0, 0, 0]]) elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[8, 0, 12, 0], [0, 0, 0, 0], [24, 0, 28, 0], [0, 0, 0, 0]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[32, 0, 36, 0], [0, 0, 0, 0], [48, 0, 52, 0], [0, 0, 0, 0]]) else: assert np.all(np.array(f.data) == [[40, 0, 44, 0], [0, 0, 0, 0], [56, 0, 60, 0], [0, 0, 0, 0]]) f.data[:] = 0 f.data[1::2, 1::2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 0, 0, 4], [0, 0, 0, 0], [0, 16, 0, 20]]) elif LEFT in 
glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 8, 0, 12], [0, 0, 0, 0], [0, 24, 0, 28]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 32, 0, 36], [0, 0, 0, 0], [0, 48, 0, 52]]) else: assert np.all(np.array(f.data) == [[0, 0, 0, 0], [0, 40, 0, 44], [0, 0, 0, 0], [0, 56, 0, 60]]) f.data[:] = 0 f.data[6::-2, 6::-2] = t.data[:, :, 0] if LEFT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[60, 0, 56, 0], [0, 0, 0, 0], [44, 0, 40, 0], [0, 0, 0, 0]]) elif LEFT in glb_pos_map0[x0] and RIGHT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[52, 0, 48, 0], [0, 0, 0, 0], [36, 0, 32, 0], [0, 0, 0, 0]]) elif RIGHT in glb_pos_map0[x0] and LEFT in glb_pos_map0[y0]: assert np.all(np.array(f.data) == [[28, 0, 24, 0], [0, 0, 0, 0], [12, 0, 8, 0], [0, 0, 0, 0]]) else: assert np.all(np.array(f.data) == [[20, 0, 16, 0], [0, 0, 0, 0], [4, 0, 0, 0], [0, 0, 0, 0]]) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('shape, slice0, slice1, slice2', [ ((31, 31, 31), (slice(None, None, 1), 2, slice(None, None, 1)), (slice(None, None, 1), 0, slice(None, None, 1)), (slice(None, None, 1), 1, slice(None, None, 1))), ((17, 17, 17), (slice(None, None, 1), slice(None, None, 1), 2), (slice(None, None, 1), slice(None, None, 1), 0), (slice(None, None, 1), slice(None, None, 1), 1)), ((8, 8, 8), (slice(None, None, 1), 5, slice(None, None, 1)), (slice(None, None, 1), 1, slice(None, None, 1)), (slice(None, None, 1), 7, slice(None, None, 1)))]) def test_niche_slicing2(self, shape, slice0, slice1, slice2): grid = Grid(shape=shape) f = Function(name='f', grid=grid) f.data[:] = 1 f.data[slice0] = f.data[slice1] f.data[slice0] += f.data[slice2] result0 = np.array(f.data[slice0]) expected0 = np.full(result0.shape, 2) assert(np.all(result0 == expected0)) result1 = np.array(f.data[slice1]) expected1 = np.full(result1.shape, 1) 
assert(np.all(result1 == expected1)) result2 = np.array(f.data[slice2]) expected2 = np.full(result2.shape, 1) assert(np.all(result2 == expected2)) @pytest.mark.parallel(mode=4) def test_neg_start_stop(self): grid0 = Grid(shape=(8, 8)) f = Function(name='f', grid=grid0, space_order=0, dtype=np.int32) dat = np.arange(64, dtype=np.int32) a = dat.reshape(grid0.shape) f.data[:] = a grid1 = Grid(shape=(12, 12)) x, y = grid1.dimensions glb_pos_map = grid1.distributor.glb_pos_map h = Function(name='h', grid=grid1, space_order=0, dtype=np.int32) slices = (slice(-3, -1, 1), slice(-1, -5, -1)) h.data[8:10, 0:4] = f.data[slices] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.count_nonzero(h.data[:]) == 0 elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.count_nonzero(h.data[:]) == 0 elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(np.array(h.data) == [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [47, 46, 45, 44, 0, 0], [55, 54, 53, 52, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) else: assert np.count_nonzero(h.data[:]) == 0 @pytest.mark.parallel(mode=4) def test_indexing_in_views(self): grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map myrank = grid.distributor.myrank u = Function(name='u', grid=grid, space_order=0) u.data[:] = myrank # Note that the `1`s are global indices view = u.data[1:, 1:] assert np.all(view[:] == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view.shape == (1, 1) assert np.all(view == 0.) assert view[0, 0] == 0. assert view[1, 1] is None assert view[1].shape == (0, 1) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert view.shape == (1, 2) assert np.all(view == 1.) assert view[0, 0] is None assert view[1, 1] is None assert view[1].shape == (0, 2) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view.shape == (2, 1) assert np.all(view == 2.) 
assert view[0, 0] is None assert view[1, 1] is None assert view[1].shape == (1,) assert np.all(view[1] == 2.) else: assert view.shape == (2, 2) assert np.all(view == 3.) assert view[0, 0] is None assert view[1, 1] == 3. assert view[1].shape == (2,) assert np.all(view[1] == 3.) # Now we further slice into `view` view2 = view[1:, 1:] assert np.all(view2[:] == myrank) if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view2.shape == (0, 0) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert view2.shape == (0, 2) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert view2.shape == (2, 0) else: assert view2.shape == (2, 2) # Now a change in `view2` by the only rank that "sees" it should affect # both `view` and `u.data` view2[:] += 1 if RIGHT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data[:] == myrank + 1) assert np.all(view[:] == myrank + 1) assert np.all(view2[:] == myrank + 1) else: assert np.all(view[:] == myrank) assert np.all(view2[:] == myrank) assert view2.size == 0 @pytest.mark.parallel(mode=4) def test_from_replicated_to_distributed(self): shape = (4, 4) grid = Grid(shape=shape) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map u = Function(name='u', grid=grid, space_order=0) # distributed v = Function(name='v', grid=grid, space_order=0) # distributed a = np.arange(16).reshape(shape) # replicated # Full array u.data[:] = a if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[0, 1], [4, 5]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[2, 3], [6, 7]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[8, 9], [12, 13]]) else: assert np.all(u.data == [[10, 11], [14, 15]]) # Subsection (all ranks touched) u.data[:] = 0 u.data[1:3, 1:3] = a[1:3, 1:3] # Same as above but with negative indices v.data[:] = 0 v.data[1:-1, 1:-1] = a[1:-1, 1:-1] if LEFT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert 
np.all(u.data == [[0, 0], [0, 5]]) assert np.all(v.data == [[0, 0], [0, 5]]) elif LEFT in glb_pos_map[x] and RIGHT in glb_pos_map[y]: assert np.all(u.data == [[0, 0], [6, 0]]) assert np.all(v.data == [[0, 0], [6, 0]]) elif RIGHT in glb_pos_map[x] and LEFT in glb_pos_map[y]: assert np.all(u.data == [[0, 9], [0, 0]]) assert np.all(v.data == [[0, 9], [0, 0]]) else: assert np.all(u.data == [[10, 0], [0, 0]]) assert np.all(v.data == [[10, 0], [0, 0]]) # The assigned data must have same shape as the one of the distributed array, # otherwise an exception is expected try: u.data[1:3, 1:3] = a[1:2, 1:2] except ValueError: assert True except: assert False @pytest.mark.parallel(mode=4) def test_misc_setup(self): """Test setup of Functions with mixed distributed/replicated Dimensions.""" grid = Grid(shape=(4, 4)) _, y = grid.dimensions dy = Dimension(name='dy') # Note: `grid` must be passed to `c` since `x` is a distributed dimension, # and `grid` carries the `x` decomposition c = Function(name='c', grid=grid, dimensions=(y, dy), shape=(4, 5)) # The following should be identical to `c` in everything but the name c2 = Function(name='c2', grid=grid, dimensions=(y, dy), shape=(None, 5)) assert c.shape == c2.shape == (2, 5) assert c.shape_with_halo == c2.shape_with_halo assert c._decomposition == c2._decomposition # The following should all raise an exception as illegal try: Function(name='c3', grid=grid, dimensions=(y, dy)) assert False except TypeError: # Missing `shape` assert True # The following should all raise an exception as illegal try: Function(name='c4', grid=grid, dimensions=(y, dy), shape=(3, 5)) assert False except ValueError: # The provided y-size, 3, doesn't match the y-size in grid (4) assert True # The following should all raise an exception as illegal try: Function(name='c4', grid=grid, dimensions=(y, dy), shape=(4,)) assert False except ValueError: # Too few entries for `shape` (two expected, for `y` and `dy`) assert True @pytest.mark.parallel(mode=4) def 
test_misc_data(self): """ Test data insertion/indexing for Functions with mixed distributed/replicated Dimensions. """ dx = Dimension(name='dx') grid = Grid(shape=(4, 4)) x, y = grid.dimensions glb_pos_map = grid.distributor.glb_pos_map # Note: `grid` must be passed to `c` since `x` is a distributed dimension, # and `grid` carries the `x` decomposition c = Function(name='c', grid=grid, dimensions=(x, dx), shape=(4, 5)) # Data insertion for i in range(4): c.data[i, 0] = 1.0+i c.data[i, 1] = 1.0+i c.data[i, 2] = 3.0+i c.data[i, 3] = 6.0+i c.data[i, 4] = 5.0+i # Data indexing if LEFT in glb_pos_map[x]: assert(np.all(c.data[0] == [1., 1., 3., 6., 5.])) assert(np.all(c.data[1] == [2., 2., 4., 7., 6.])) else: assert(np.all(c.data[2] == [3., 3., 5., 8., 7.])) assert(np.all(c.data[3] == [4., 4., 6., 9., 8.])) # Same as before, but with negative indices and non-trivial slices if LEFT in glb_pos_map[x]: assert(np.all(c.data[0:-3] == [1., 1., 3., 6., 5.])) assert(np.all(c.data[-3:-2] == [2., 2., 4., 7., 6.])) else: assert(np.all(c.data[-2:-1] == [3., 3., 5., 8., 7.])) assert(np.all(c.data[-1] == [4., 4., 6., 9., 8.])) class TestDataGather(object): @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('rank', [0, 1, 2, 3]) def test_simple_gather(self, rank): """ Test a simple gather on various ranks.""" grid = Grid(shape=(10, 10), extent=(9, 9)) f = Function(name='f', grid=grid, dtype=np.int32) res = np.arange(100).reshape(grid.shape) f.data[:] = res myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(rank=rank) if myrank == rank: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('start, stop, step', [ (None, None, None), (None, None, 2), (None, None, -1), (None, None, -2), (1, 8, 3), ((0, 4), None, (2, 1))]) def test_sliced_gather_2D(self, start, stop, step): """ Test gather for various 2D slices.""" grid = Grid(shape=(10, 10), extent=(9, 9)) f = Function(name='f', grid=grid, 
dtype=np.int32) dat = np.arange(100).reshape(grid.shape) if isinstance(step, int) or step is None: step = [step for _ in grid.shape] if isinstance(start, int) or start is None: start = [start for _ in grid.shape] if isinstance(stop, int) or stop is None: stop = [stop for _ in grid.shape] idx = [] for i, j, k in zip(start, stop, step): idx.append(slice(i, j, k)) idx = tuple(idx) res = dat[idx] f.data[:] = dat myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(start=start, stop=stop, step=step) if myrank == 0: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) @pytest.mark.parallel(mode=4) @pytest.mark.parametrize('start, stop, step', [ (None, None, None), (None, None, 2), (None, None, -1), (None, None, -2), (1, 8, 3), ((0, 4, 4), None, (2, 1, 1))]) def test_sliced_gather_3D(self, start, stop, step): """ Test gather for various 3D slices.""" grid = Grid(shape=(10, 10, 10), extent=(9, 9, 9)) f = Function(name='f', grid=grid, dtype=np.int32) dat = np.arange(1000).reshape(grid.shape) if isinstance(step, int) or step is None: step = [step for _ in grid.shape] if isinstance(start, int) or start is None: start = [start for _ in grid.shape] if isinstance(stop, int) or stop is None: stop = [stop for _ in grid.shape] idx = [] for i, j, k in zip(start, stop, step): idx.append(slice(i, j, k)) idx = tuple(idx) res = dat[idx] f.data[:] = dat myrank = grid._distributor.comm.Get_rank() ans = f.data_gather(start=start, stop=stop, step=step) if myrank == 0: assert np.all(ans == res) else: assert ans.shape == (0, )*len(grid.shape) def test_scalar_arg_substitution(): """ Tests the relaxed (compared to other devito sympy subclasses) substitution semantics for scalars, which is used for argument substitution into symbolic expressions. 
""" t0 = Scalar(name='t0').indexify() t1 = Scalar(name='t1').indexify() assert t0 != 0 assert t0.subs('t0', 2) == 2 assert t0.subs('t0', t1) == t1 @pytest.mark.skip(reason="will corrupt memory and risk crash") def test_oob_noguard(): """ Tests the guard page allocator. This writes to memory it shouldn't, and typically gets away with it. """ # A tiny grid grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0, allocator=ALLOC_FLAT) Operator(Eq(u[2000, 0], 1.0)).apply() @pytest.mark.skip(reason="will crash entire test suite") def test_oob_guard(): """ Tests the guard page allocator. This causes a segfault in the test suite, deliberately. """ # A tiny grid grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=0, allocator=ALLOC_GUARD) Operator(Eq(u[2000, 0], 1.0)).apply() def test_numpy_c_contiguous(): """ Test that devito.Data is correctly reported by NumPy as being C-contiguous """ grid = Grid(shape=(4, 4)) u = Function(name='u', grid=grid, space_order=2) assert(u._data_allocated.flags.c_contiguous) def test_external_allocator(): shape = (2, 2) space_order = 0 numpy_array = np.ones(shape, dtype=np.float32) g = Grid(shape) f = Function(name='f', space_order=space_order, grid=g, allocator=ExternalAllocator(numpy_array), initializer=lambda x: None) # Ensure the two arrays have the same value assert(np.array_equal(f.data, numpy_array)) # Ensure the original numpy array is unchanged assert(np.array_equal(numpy_array, np.ones(shape, dtype=np.float32))) # Change the underlying numpy array numpy_array[:] = 3. # Ensure the function.data changes too assert(np.array_equal(f.data, numpy_array)) # Change the function.data f.data[:] = 4. # Ensure the underlying numpy array changes too assert(np.array_equal(f.data, numpy_array)) if __name__ == "__main__": configuration['mpi'] = True TestDataDistributed().test_misc_data()
opesci/devito
tests/test_data.py
devito/passes/clusters/asynchrony.py
import six import chainer from chainer import backend from chainer import function_node from chainer.utils import type_check class Tile(function_node.FunctionNode): """Tiling of an array.""" def __init__(self, reps): if isinstance(reps, six.integer_types): self.reps = (reps,) elif isinstance(reps, tuple) and all( isinstance(x, six.integer_types) for x in reps): self.reps = reps else: msg = 'reps must be int or tuple of ints.\n' \ 'Actual: {0}'.format(type(reps)) raise TypeError(msg) if not all(x >= 0 for x in self.reps): raise ValueError('All elements in reps must be zero or larger') def check_type_forward(self, in_types): type_check.expect(in_types.size() == 1) def forward(self, inputs): self._in_shape = inputs[0].shape xp = backend.get_array_module(*inputs) return xp.tile(inputs[0], self.reps), def backward(self, indexes, grad_outputs): reps = self.reps shape = tuple(self._in_shape) ndim = len(shape) # Ensure input and reps have the same length. if ndim > len(reps): reps = (1,) * (ndim - len(reps)) + reps elif ndim < len(reps): shape = (1,) * (len(reps) - ndim) + shape gy, = grad_outputs # Reshape so that base axis and reps axis can be distinguished. new_shape = [] for i in range(gy.ndim): new_shape.append(reps[i]) new_shape.append(shape[i]) new_shape = tuple(new_shape) # Sum along reps axis reps_axis = tuple(range(0, 2 * gy.ndim, 2)) gy = gy.reshape(new_shape) gy = chainer.functions.sum(gy, axis=reps_axis) if ndim < len(reps): return gy.reshape(self._in_shape), else: return gy, def tile(x, reps): """Construct an array by tiling a given array. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Let the length of ``reps`` be ``d``. If ``x.ndim < d``, ``x`` is treated as ``d``-dimensional array by prepending new axes. For example, when the shape of ``x`` is ``(2,)`` and tiled with 2-dim repetitions, ``x`` is treated as the shape ``(1, 2)``. If ``x.ndim > d``, ``reps`` is treated as ``x.ndim``-dimensional by pre-pending 1's. 
For example, when the shape of ``x`` is ``(2, 3, 2, 3)``, the 2-dim ``reps`` of ``(2, 2)`` is treated as ``(1, 1, 2, 2)``. reps (:class:`int` or :class:`tuple` of :class:`int` s): The number of times which ``x`` is replicated along each axis. Returns: ~chainer.Variable: The tiled output Variable. Let the length of ``reps`` be ``d``, the output has the dimension of ``max(d, x.ndim)``. .. admonition:: Example >>> x = np.array([0, 1, 2]) >>> x.shape (3,) >>> y = F.tile(x, 2) >>> y.shape (6,) >>> y.array array([0, 1, 2, 0, 1, 2]) >>> y = F.tile(x, (2, 2)) >>> y.shape (2, 6) >>> y.array array([[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]]) >>> y = F.tile(x, (2, 1, 2)) >>> y.shape (2, 1, 6) >>> y.array array([[[0, 1, 2, 0, 1, 2]], <BLANKLINE> [[0, 1, 2, 0, 1, 2]]]) >>> x = np.array([[1, 2], [3, 4]]) >>> x.shape (2, 2) >>> y = F.tile(x, 2) >>> y.shape (2, 4) >>> y.array array([[1, 2, 1, 2], [3, 4, 3, 4]]) >>> y = F.tile(x, (2, 2)) >>> y.shape (4, 4) >>> y.array array([[1, 2, 1, 2], [3, 4, 3, 4], [1, 2, 1, 2], [3, 4, 3, 4]]) >>> y = F.tile(x, (2, 1, 2)) >>> y.shape (2, 2, 4) >>> y.array array([[[1, 2, 1, 2], [3, 4, 3, 4]], <BLANKLINE> [[1, 2, 1, 2], [3, 4, 3, 4]]]) """ return Tile(reps).apply((x,))[0]
import os
import warnings

import chainer
import chainer.functions as F
import chainer.links as L
from chainer import testing
import numpy as np
import onnx
import pytest

from onnx_chainer import export
from onnx_chainer import export_testcase
from onnx_chainer import onnx_helper
from onnx_chainer.replace_func import as_funcnode
from onnx_chainer.replace_func import fake_as_funcnode
from onnx_chainer.testing import input_generator
from onnx_chainer_tests.helper import ONNXModelChecker
from onnx_chainer_tests.helper import ONNXModelTest


def test_fake_as_funcnode_without_replace():

    class Model(chainer.Chain):
        # FIX: this was misspelled ``_init__`` and therefore never called;
        # the model still worked only because chainer.Chain supplies a
        # default ``__init__``.
        def __init__(self):
            super().__init__()

        def add(self, xs, value=0.01):
            return xs.array + value

        def __call__(self, xs):
            return F.sigmoid(self.add(xs))

    model = Model()
    x = input_generator.increasing(3, 4)

    onnx_model = export(model, x)

    sigmoid_nodes = [
        node for node in onnx_model.graph.node if node.op_type == 'Sigmoid']
    assert len(sigmoid_nodes) == 1
    # sigmoid node should be expected to connect with input
    # but the connection is cut because `add` method takes array.
    assert sigmoid_nodes[0].input[0] != 'Input_0'


class TestReplaceNumpyFullToConstantOfShape(ONNXModelTest):
    # This test case is a real-world example, to handle np.full

    def test_output(self):
        class Model(chainer.Chain):
            def __init__(self, value):
                super().__init__()
                self.value = value

            @as_funcnode('NumpyFull')
            def full(self, xs, value=0):
                # not support `def full(self, xs_shape, value=0)`
                # wrapped function node cannot handle shape directly yet.
                return np.full(xs.array.shape, value, dtype=np.float32)

            def __call__(self, xs):
                return F.sigmoid(self.full(xs, value=self.value))

        model = Model(value=5)
        x = input_generator.increasing(2, 3, 4)

        def numpy_full_converter(params):
            gb = onnx_helper.GraphBuilder()
            output = gb.op('Shape', params.input_names)
            value = onnx.helper.make_tensor(
                'value', onnx.TensorProto.FLOAT, [1], [params.func.value])
            gb.op_output_named(
                'ConstantOfShape', [output], params.output_names, value=value)
            return gb.nodes()

        addon_converters = {'NumpyFull': numpy_full_converter}

        self.expect(
            model, x, skip_opset_version=[7, 8],
            external_converters=addon_converters)


class TestReplaceWithOutputGrad(ONNXModelChecker):

    def get_model(self):
        class Model(chainer.Chain):
            def __init__(self):
                super().__init__()
                with self.init_scope():
                    self.l = L.Linear(None, 2)

            def half(self, xs, value=0.5):
                return xs * value

            def forward(self, xs):
                h = self.l(xs)
                h = self.half(h)
                return F.sum(chainer.as_variable(h))
        return Model()

    def test_grad_error(self):
        model = self.get_model()
        # this alternative function does not return chainer.Variable
        # backward propagation will fail
        model.half = fake_as_funcnode(
            lambda xs, value=0.5: xs.array * value, 'MulConstant')

        x = input_generator.increasing(2, 5)
        with pytest.raises(ValueError):
            self.expect(model, x, output_grad=True)

    def test_output(self, tmpdir):
        # first, make expected gradients to temp directory
        expected_result_path = str(tmpdir)
        model = self.get_model()
        x = input_generator.increasing(2, 5)
        export_testcase(model, x, expected_result_path, output_grad=True)
        data_set_name = 'test_data_set_0'
        expected_gradients = [
            os.path.join(expected_result_path, data_set_name,
                         'gradient_{}.pb'.format(i)) for i in range(2)]
        assert all(os.path.isfile(path) for path in expected_gradients)

        # model.half returns chainer.Variable and enabled backward
        # regardless using replacing
        model.half = fake_as_funcnode(model.half, 'MulConstant')
        x = input_generator.increasing(2, 5)

        def gradient_check(model, path):
            actual_gradients = [
                os.path.join(path, data_set_name,
                             'gradient_{}.pb'.format(i)) for i in range(2)]
            assert all(os.path.isfile(p) for p in actual_gradients)

            def load_tensor(path):
                tensor = onnx.load_tensor(path)
                return onnx.numpy_helper.to_array(tensor)

            for e_path, a_path in zip(expected_gradients, actual_gradients):
                expected = load_tensor(e_path)
                actual = load_tensor(a_path)
                np.testing.assert_allclose(expected, actual)

        self.expect(
            model, x, output_grad=True,
            custom_model_test_func=gradient_check)


class TestReplaceFuncBackward(ONNXModelTest):

    def _test_replace_func(self, fn, xs, set_grad=False):
        # Compare outputs and gradients of ``fn`` against its
        # ``as_funcnode``-wrapped counterpart.
        def make_list(v):
            if isinstance(v, (list, tuple)):
                return list(v)
            else:
                return [v]

        xvs = [x for x in xs if isinstance(x, chainer.Variable)]
        rfn = as_funcnode('fn')(fn)
        eys = make_list(fn(*xs))
        egxs = chainer.grad(eys, xvs, set_grad=set_grad)
        ays = make_list(rfn(*xs))
        agxs = chainer.grad(ays, xvs, set_grad=set_grad)

        assert len(eys) == len(ays)
        for ay, ey in zip(ays, eys):
            np.testing.assert_allclose(ay.array, ey.array)

        assert len(egxs) == len(agxs)
        for agx, egx in zip(agxs, egxs):
            if egx is None:
                # FIX: this previously asserted ``egx is None`` inside the
                # branch that already knows it, a tautology; the intent is
                # that the actual gradient is also absent.
                assert agx is None
            else:
                np.testing.assert_allclose(agx.array, egx.array)

    def test_backward_simple(self):
        self._test_replace_func(lambda a, b: a * b,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_partially_differentiable(self):
        self._test_replace_func(lambda a, b: a * b.array,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_multi_outputs(self):
        self._test_replace_func(lambda a, b, c: (a * b, a / b, a * b * c),
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2)),
                                 5])

    def test_backward_no_side_effect(self):
        a = chainer.Variable(np.array(2.3))
        b = chainer.Variable(np.array(4.2))
        x0 = a * b
        x1 = chainer.Variable(np.array(3.7))

        self._test_replace_func(lambda a, b: a * b, [x0, x1])
        # No side-effect to `grad`.
        assert x0.grad is None
        assert x1.grad is None
        assert a.grad is None
        assert b.grad is None

        # Gradient computation must stop at `x0` and `x1`.
        self._test_replace_func(lambda a, b: a * b, [x0, x1], set_grad=True)
        assert x0.grad is not None
        assert x1.grad is not None
        assert a.grad is None
        assert b.grad is None


@testing.parameterize(
    {'func_kind': 'list', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'list_kwargs', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'var_with_deco', 'in_shape': (3, 4),
     'op_type': 'AddConstant'},
    {'func_kind': 'var_kwargs', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
    {'func_kind': 'var', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
)
class TestReplaceFunc(ONNXModelTest):

    def get_model(self, target_func, input_converter):
        class Model(chainer.Chain):
            def __init__(self, target_func, input_converter):
                super().__init__()
                self.input_converter = input_converter
                self.fn = target_func

            def __call__(self, xs):
                args, kwargs = self.input_converter(xs)
                h = self.fn(*args, **kwargs)
                return F.sigmoid(h)
        return Model(target_func, input_converter)

    def test_output(self):
        attr = None
        is_deco = False
        if self.func_kind == 'list':
            def input_converter(xs):
                return ([xs[0], xs[1]],), {}

            def target_func(xs):
                return xs[0].array + xs[1].array
        elif self.func_kind == 'list_kwargs':
            def input_converter(xs):
                return (), {'xs': [xs[0], xs[1]]}

            def target_func(xs=None):
                assert xs is not None
                return xs[0].array + xs[1].array
        elif self.func_kind == 'var_with_deco':
            def input_converter(xs):
                return (xs,), {}

            @as_funcnode('AddConstant', rename_attributes=[('b', 'value')])
            def target_func(x, b=0.01):
                return x.array + b
            is_deco = True
        elif self.func_kind == 'var_kwargs':
            def input_converter(xs):
                return (), {'x': xs, 'value': 0.02}

            def target_func(x=None, value=0.01):
                assert x is not None
                return x.array + value
        else:
            assert self.func_kind == 'var'

            def input_converter(xs):
                return (xs, 0.01), {}

            def target_func(x, value):
                return x.array + value
            attr = [(1, 'value')]

        model = self.get_model(target_func, input_converter)
        x = input_generator.increasing(*self.in_shape)

        if not is_deco:
            model.fn = fake_as_funcnode(
                model.fn, self.op_type, rename_attributes=attr)

        name = 'replace_func_' + self.func_kind
        self.expect(model, x, name=name)


@pytest.mark.parametrize('return_type', ['list', 'dict'])
def test_replace_func_collection_return(tmpdir, return_type):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self, return_type):
            super().__init__()
            self.return_type = return_type

        def tiled_array(self, xs, n=5):
            if self.return_type == 'list':
                return [xs.array * i for i in range(1, 1 + n)]
            else:
                assert self.return_type == 'dict'
                return {str(i): xs.array * i for i in range(1, 1 + n)}

        def __call__(self, xs):
            return self.tiled_array(xs)

    model = Model(return_type)
    x = input_generator.increasing(1, 5)

    with warnings.catch_warnings(record=True):
        model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray')

    def tiled_array_converter(params):
        return onnx_helper.make_node(
            'xTiledArray', params.input_names, params.output_names),

    addon_converters = {'xTiledArray': tiled_array_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 1
    assert node_names[0] == 'xTiledArray_0'
    output_names = [n.name for n in onnx_model.graph.output]
    assert len(output_names) == 5
    for i, name in enumerate(output_names):
        assert name == 'xTiledArray_0_{:d}'.format(i)


def test_fake_as_funcnode_keep_structure(tmpdir):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def f(self, x):
            return {'a': (x, x + 1), 'b': [x + 2, x + 3, x + 4]}

        def __call__(self, x):
            ret = self.f(x)
            return ret['a'][0] + ret['b'][1]

    model = Model()
    x = input_generator.increasing(2, 3)

    with warnings.catch_warnings(record=True):
        model.f = fake_as_funcnode(model.f, 'xF')

    def f_converter(params):
        return onnx_helper.make_node(
            'xF', params.input_names, params.output_names),

    addon_converters = {'xF': f_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 2
    assert node_names[0] == 'xF_0'
    assert len(onnx_model.graph.node[0].output) == 5
    assert len(onnx_model.graph.output) == 1
pfnet/chainer
tests/onnx_chainer_tests/test_replace_func.py
chainer/functions/array/tile.py
from chainer import backend from chainer import function_node from chainer.utils import type_check class FlipLR(function_node.FunctionNode): """Flip array in the left/right direction.""" def check_type_forward(self, in_types): type_check._argname(in_types, ('a',)) a_type = in_types[0] type_check.expect( a_type.dtype.kind == 'f', a_type.ndim >= 2 ) def forward(self, inputs): xp = backend.get_array_module(*inputs) return xp.fliplr(inputs[0]), def backward(self, indexes, grad_outputs): return FlipLR().apply(grad_outputs) def fliplr(a): """Flip array in the left/right direction. Args: a (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return FlipLR().apply((a,))[0]
import os
import warnings

import chainer
import chainer.functions as F
import chainer.links as L
from chainer import testing
import numpy as np
import onnx
import pytest

from onnx_chainer import export
from onnx_chainer import export_testcase
from onnx_chainer import onnx_helper
from onnx_chainer.replace_func import as_funcnode
from onnx_chainer.replace_func import fake_as_funcnode
from onnx_chainer.testing import input_generator
from onnx_chainer_tests.helper import ONNXModelChecker
from onnx_chainer_tests.helper import ONNXModelTest


def test_fake_as_funcnode_without_replace():

    class Model(chainer.Chain):
        # FIX: this was misspelled ``_init__`` and therefore never called;
        # the model still worked only because chainer.Chain supplies a
        # default ``__init__``.
        def __init__(self):
            super().__init__()

        def add(self, xs, value=0.01):
            return xs.array + value

        def __call__(self, xs):
            return F.sigmoid(self.add(xs))

    model = Model()
    x = input_generator.increasing(3, 4)

    onnx_model = export(model, x)

    sigmoid_nodes = [
        node for node in onnx_model.graph.node if node.op_type == 'Sigmoid']
    assert len(sigmoid_nodes) == 1
    # sigmoid node should be expected to connect with input
    # but the connection is cut because `add` method takes array.
    assert sigmoid_nodes[0].input[0] != 'Input_0'


class TestReplaceNumpyFullToConstantOfShape(ONNXModelTest):
    # This test case is a real-world example, to handle np.full

    def test_output(self):
        class Model(chainer.Chain):
            def __init__(self, value):
                super().__init__()
                self.value = value

            @as_funcnode('NumpyFull')
            def full(self, xs, value=0):
                # not support `def full(self, xs_shape, value=0)`
                # wrapped function node cannot handle shape directly yet.
                return np.full(xs.array.shape, value, dtype=np.float32)

            def __call__(self, xs):
                return F.sigmoid(self.full(xs, value=self.value))

        model = Model(value=5)
        x = input_generator.increasing(2, 3, 4)

        def numpy_full_converter(params):
            gb = onnx_helper.GraphBuilder()
            output = gb.op('Shape', params.input_names)
            value = onnx.helper.make_tensor(
                'value', onnx.TensorProto.FLOAT, [1], [params.func.value])
            gb.op_output_named(
                'ConstantOfShape', [output], params.output_names, value=value)
            return gb.nodes()

        addon_converters = {'NumpyFull': numpy_full_converter}

        self.expect(
            model, x, skip_opset_version=[7, 8],
            external_converters=addon_converters)


class TestReplaceWithOutputGrad(ONNXModelChecker):

    def get_model(self):
        class Model(chainer.Chain):
            def __init__(self):
                super().__init__()
                with self.init_scope():
                    self.l = L.Linear(None, 2)

            def half(self, xs, value=0.5):
                return xs * value

            def forward(self, xs):
                h = self.l(xs)
                h = self.half(h)
                return F.sum(chainer.as_variable(h))
        return Model()

    def test_grad_error(self):
        model = self.get_model()
        # this alternative function does not return chainer.Variable
        # backward propagation will fail
        model.half = fake_as_funcnode(
            lambda xs, value=0.5: xs.array * value, 'MulConstant')

        x = input_generator.increasing(2, 5)
        with pytest.raises(ValueError):
            self.expect(model, x, output_grad=True)

    def test_output(self, tmpdir):
        # first, make expected gradients to temp directory
        expected_result_path = str(tmpdir)
        model = self.get_model()
        x = input_generator.increasing(2, 5)
        export_testcase(model, x, expected_result_path, output_grad=True)
        data_set_name = 'test_data_set_0'
        expected_gradients = [
            os.path.join(expected_result_path, data_set_name,
                         'gradient_{}.pb'.format(i)) for i in range(2)]
        assert all(os.path.isfile(path) for path in expected_gradients)

        # model.half returns chainer.Variable and enabled backward
        # regardless using replacing
        model.half = fake_as_funcnode(model.half, 'MulConstant')
        x = input_generator.increasing(2, 5)

        def gradient_check(model, path):
            actual_gradients = [
                os.path.join(path, data_set_name,
                             'gradient_{}.pb'.format(i)) for i in range(2)]
            assert all(os.path.isfile(p) for p in actual_gradients)

            def load_tensor(path):
                tensor = onnx.load_tensor(path)
                return onnx.numpy_helper.to_array(tensor)

            for e_path, a_path in zip(expected_gradients, actual_gradients):
                expected = load_tensor(e_path)
                actual = load_tensor(a_path)
                np.testing.assert_allclose(expected, actual)

        self.expect(
            model, x, output_grad=True,
            custom_model_test_func=gradient_check)


class TestReplaceFuncBackward(ONNXModelTest):

    def _test_replace_func(self, fn, xs, set_grad=False):
        # Compare outputs and gradients of ``fn`` against its
        # ``as_funcnode``-wrapped counterpart.
        def make_list(v):
            if isinstance(v, (list, tuple)):
                return list(v)
            else:
                return [v]

        xvs = [x for x in xs if isinstance(x, chainer.Variable)]
        rfn = as_funcnode('fn')(fn)
        eys = make_list(fn(*xs))
        egxs = chainer.grad(eys, xvs, set_grad=set_grad)
        ays = make_list(rfn(*xs))
        agxs = chainer.grad(ays, xvs, set_grad=set_grad)

        assert len(eys) == len(ays)
        for ay, ey in zip(ays, eys):
            np.testing.assert_allclose(ay.array, ey.array)

        assert len(egxs) == len(agxs)
        for agx, egx in zip(agxs, egxs):
            if egx is None:
                # FIX: this previously asserted ``egx is None`` inside the
                # branch that already knows it, a tautology; the intent is
                # that the actual gradient is also absent.
                assert agx is None
            else:
                np.testing.assert_allclose(agx.array, egx.array)

    def test_backward_simple(self):
        self._test_replace_func(lambda a, b: a * b,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_partially_differentiable(self):
        self._test_replace_func(lambda a, b: a * b.array,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_multi_outputs(self):
        self._test_replace_func(lambda a, b, c: (a * b, a / b, a * b * c),
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2)),
                                 5])

    def test_backward_no_side_effect(self):
        a = chainer.Variable(np.array(2.3))
        b = chainer.Variable(np.array(4.2))
        x0 = a * b
        x1 = chainer.Variable(np.array(3.7))

        self._test_replace_func(lambda a, b: a * b, [x0, x1])
        # No side-effect to `grad`.
        assert x0.grad is None
        assert x1.grad is None
        assert a.grad is None
        assert b.grad is None

        # Gradient computation must stop at `x0` and `x1`.
        self._test_replace_func(lambda a, b: a * b, [x0, x1], set_grad=True)
        assert x0.grad is not None
        assert x1.grad is not None
        assert a.grad is None
        assert b.grad is None


@testing.parameterize(
    {'func_kind': 'list', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'list_kwargs', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'var_with_deco', 'in_shape': (3, 4),
     'op_type': 'AddConstant'},
    {'func_kind': 'var_kwargs', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
    {'func_kind': 'var', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
)
class TestReplaceFunc(ONNXModelTest):

    def get_model(self, target_func, input_converter):
        class Model(chainer.Chain):
            def __init__(self, target_func, input_converter):
                super().__init__()
                self.input_converter = input_converter
                self.fn = target_func

            def __call__(self, xs):
                args, kwargs = self.input_converter(xs)
                h = self.fn(*args, **kwargs)
                return F.sigmoid(h)
        return Model(target_func, input_converter)

    def test_output(self):
        attr = None
        is_deco = False
        if self.func_kind == 'list':
            def input_converter(xs):
                return ([xs[0], xs[1]],), {}

            def target_func(xs):
                return xs[0].array + xs[1].array
        elif self.func_kind == 'list_kwargs':
            def input_converter(xs):
                return (), {'xs': [xs[0], xs[1]]}

            def target_func(xs=None):
                assert xs is not None
                return xs[0].array + xs[1].array
        elif self.func_kind == 'var_with_deco':
            def input_converter(xs):
                return (xs,), {}

            @as_funcnode('AddConstant', rename_attributes=[('b', 'value')])
            def target_func(x, b=0.01):
                return x.array + b
            is_deco = True
        elif self.func_kind == 'var_kwargs':
            def input_converter(xs):
                return (), {'x': xs, 'value': 0.02}

            def target_func(x=None, value=0.01):
                assert x is not None
                return x.array + value
        else:
            assert self.func_kind == 'var'

            def input_converter(xs):
                return (xs, 0.01), {}

            def target_func(x, value):
                return x.array + value
            attr = [(1, 'value')]

        model = self.get_model(target_func, input_converter)
        x = input_generator.increasing(*self.in_shape)

        if not is_deco:
            model.fn = fake_as_funcnode(
                model.fn, self.op_type, rename_attributes=attr)

        name = 'replace_func_' + self.func_kind
        self.expect(model, x, name=name)


@pytest.mark.parametrize('return_type', ['list', 'dict'])
def test_replace_func_collection_return(tmpdir, return_type):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self, return_type):
            super().__init__()
            self.return_type = return_type

        def tiled_array(self, xs, n=5):
            if self.return_type == 'list':
                return [xs.array * i for i in range(1, 1 + n)]
            else:
                assert self.return_type == 'dict'
                return {str(i): xs.array * i for i in range(1, 1 + n)}

        def __call__(self, xs):
            return self.tiled_array(xs)

    model = Model(return_type)
    x = input_generator.increasing(1, 5)

    with warnings.catch_warnings(record=True):
        model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray')

    def tiled_array_converter(params):
        return onnx_helper.make_node(
            'xTiledArray', params.input_names, params.output_names),

    addon_converters = {'xTiledArray': tiled_array_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 1
    assert node_names[0] == 'xTiledArray_0'
    output_names = [n.name for n in onnx_model.graph.output]
    assert len(output_names) == 5
    for i, name in enumerate(output_names):
        assert name == 'xTiledArray_0_{:d}'.format(i)


def test_fake_as_funcnode_keep_structure(tmpdir):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def f(self, x):
            return {'a': (x, x + 1), 'b': [x + 2, x + 3, x + 4]}

        def __call__(self, x):
            ret = self.f(x)
            return ret['a'][0] + ret['b'][1]

    model = Model()
    x = input_generator.increasing(2, 3)

    with warnings.catch_warnings(record=True):
        model.f = fake_as_funcnode(model.f, 'xF')

    def f_converter(params):
        return onnx_helper.make_node(
            'xF', params.input_names, params.output_names),

    addon_converters = {'xF': f_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 2
    assert node_names[0] == 'xF_0'
    assert len(onnx_model.graph.node[0].output) == 5
    assert len(onnx_model.graph.output) == 1
pfnet/chainer
tests/onnx_chainer_tests/test_replace_func.py
chainer/functions/array/fliplr.py
import numpy

from chainer.backends import cuda
from chainer import initializer


# Original code from Berkeley FCN
# https://github.com/shelhamer/fcn.berkeleyvision.org/blob/master/surgery.py
def _get_linear_filter(size, ndim, upsampling=True):
    """Make a 2D and 3D linear kernel suitable for up/downsampling"""
    factor = (size + 1) // 2
    # The kernel center is (size - 1) / 2 for both odd and even sizes.
    center = (size - 1) / 2.
    grids = numpy.ogrid[(slice(size),) * ndim]
    filt = 1.
    for grid in grids:
        filt = filt * (1. - abs(grid - center) / factor)
    if not upsampling:
        # Normalize so the kernel sums to one when used for downsampling.
        filt /= filt.sum()
    return filt


class _SamplingFilter(initializer.Initializer):

    def __init__(self, upsampling=True, interpolation='linear', dtype=None):
        self._upsampling = upsampling
        if interpolation == 'linear':
            self._get_filter_func = _get_linear_filter
        else:
            raise ValueError(
                'Unsupported interpolation method: {}'.format(interpolation))
        super(_SamplingFilter, self).__init__(dtype)

    def __call__(self, array):
        if self.dtype is not None:
            assert array.dtype == self.dtype
        xp = cuda.get_array_module(array)

        in_c, out_c = array.shape[:2]
        assert in_c == out_c or out_c == 1

        # All spatial extents must agree; the filter is square/cubic.
        ksize = None
        for k in array.shape[2:]:
            if ksize is None:
                ksize = k
            elif ksize != k:
                raise ValueError(
                    'ksize must be all same: {} != {}'.format(ksize, k))

        filt = self._get_filter_func(
            ksize, ndim=array.ndim - 2, upsampling=self._upsampling)
        filt = xp.asarray(filt)

        # Place the kernel on the diagonal of the channel axes so each
        # channel is filtered independently.
        array[...] = 0.
        if out_c == 1:
            array[xp.arange(in_c), 0, ...] = filt
        else:
            array[xp.arange(in_c), xp.arange(out_c), ...] = filt


class UpsamplingDeconvFilter(_SamplingFilter):

    """Initializes array with upsampling filter.

    The array is initialized with a standard image upsampling weight.
    This initializer is often used as initial weight for
    :func:`~chainer.links.DeconvolutionND`.
    :func:`~chainer.links.DeconvolutionND` is expected that its `stride` is
    equal to `(ksize + 1) // 2`.

    Reference: Long et al., https://arxiv.org/abs/1411.4038

    Attributes:
        interpolation (str): Upsampling interpolation method.
            Default is 'linear'.

    """

    def __init__(self, interpolation='linear', dtype=None):
        if interpolation != 'linear':
            raise ValueError(
                'Unsupported interpolation method: {}'.format(interpolation))
        super(UpsamplingDeconvFilter, self).__init__(
            upsampling=True, interpolation=interpolation, dtype=dtype)


class DownsamplingConvFilter(_SamplingFilter):

    """Initializes array with downsampling filter.

    The array is initialized with a standard image downsampling weight.
    This initializer is often used as initial weight for
    :func:`~chainer.links.ConvolutionND`.
    :func:`~chainer.links.ConvolutionND` is expected that its `stride` is
    equal to `(ksize + 1) // 2`.

    Reference: Long et al., https://arxiv.org/abs/1411.4038

    Attributes:
        interpolation (str): Downsampling interpolation method.
            Default is 'linear'.

    """

    def __init__(self, interpolation='linear', dtype=None):
        if interpolation != 'linear':
            raise ValueError(
                'Unsupported interpolation method: {}'.format(interpolation))
        super(DownsamplingConvFilter, self).__init__(
            upsampling=False, interpolation=interpolation, dtype=dtype)
import os
import warnings

import chainer
import chainer.functions as F
import chainer.links as L
from chainer import testing
import numpy as np
import onnx
import pytest

from onnx_chainer import export
from onnx_chainer import export_testcase
from onnx_chainer import onnx_helper
from onnx_chainer.replace_func import as_funcnode
from onnx_chainer.replace_func import fake_as_funcnode
from onnx_chainer.testing import input_generator
from onnx_chainer_tests.helper import ONNXModelChecker
from onnx_chainer_tests.helper import ONNXModelTest


def test_fake_as_funcnode_without_replace():

    class Model(chainer.Chain):
        # FIX: this was misspelled ``_init__`` and therefore never called;
        # the model still worked only because chainer.Chain supplies a
        # default ``__init__``.
        def __init__(self):
            super().__init__()

        def add(self, xs, value=0.01):
            return xs.array + value

        def __call__(self, xs):
            return F.sigmoid(self.add(xs))

    model = Model()
    x = input_generator.increasing(3, 4)

    onnx_model = export(model, x)

    sigmoid_nodes = [
        node for node in onnx_model.graph.node if node.op_type == 'Sigmoid']
    assert len(sigmoid_nodes) == 1
    # sigmoid node should be expected to connect with input
    # but the connection is cut because `add` method takes array.
    assert sigmoid_nodes[0].input[0] != 'Input_0'


class TestReplaceNumpyFullToConstantOfShape(ONNXModelTest):
    # This test case is a real-world example, to handle np.full

    def test_output(self):
        class Model(chainer.Chain):
            def __init__(self, value):
                super().__init__()
                self.value = value

            @as_funcnode('NumpyFull')
            def full(self, xs, value=0):
                # not support `def full(self, xs_shape, value=0)`
                # wrapped function node cannot handle shape directly yet.
                return np.full(xs.array.shape, value, dtype=np.float32)

            def __call__(self, xs):
                return F.sigmoid(self.full(xs, value=self.value))

        model = Model(value=5)
        x = input_generator.increasing(2, 3, 4)

        def numpy_full_converter(params):
            gb = onnx_helper.GraphBuilder()
            output = gb.op('Shape', params.input_names)
            value = onnx.helper.make_tensor(
                'value', onnx.TensorProto.FLOAT, [1], [params.func.value])
            gb.op_output_named(
                'ConstantOfShape', [output], params.output_names, value=value)
            return gb.nodes()

        addon_converters = {'NumpyFull': numpy_full_converter}

        self.expect(
            model, x, skip_opset_version=[7, 8],
            external_converters=addon_converters)


class TestReplaceWithOutputGrad(ONNXModelChecker):

    def get_model(self):
        class Model(chainer.Chain):
            def __init__(self):
                super().__init__()
                with self.init_scope():
                    self.l = L.Linear(None, 2)

            def half(self, xs, value=0.5):
                return xs * value

            def forward(self, xs):
                h = self.l(xs)
                h = self.half(h)
                return F.sum(chainer.as_variable(h))
        return Model()

    def test_grad_error(self):
        model = self.get_model()
        # this alternative function does not return chainer.Variable
        # backward propagation will fail
        model.half = fake_as_funcnode(
            lambda xs, value=0.5: xs.array * value, 'MulConstant')

        x = input_generator.increasing(2, 5)
        with pytest.raises(ValueError):
            self.expect(model, x, output_grad=True)

    def test_output(self, tmpdir):
        # first, make expected gradients to temp directory
        expected_result_path = str(tmpdir)
        model = self.get_model()
        x = input_generator.increasing(2, 5)
        export_testcase(model, x, expected_result_path, output_grad=True)
        data_set_name = 'test_data_set_0'
        expected_gradients = [
            os.path.join(expected_result_path, data_set_name,
                         'gradient_{}.pb'.format(i)) for i in range(2)]
        assert all(os.path.isfile(path) for path in expected_gradients)

        # model.half returns chainer.Variable and enabled backward
        # regardless using replacing
        model.half = fake_as_funcnode(model.half, 'MulConstant')
        x = input_generator.increasing(2, 5)

        def gradient_check(model, path):
            actual_gradients = [
                os.path.join(path, data_set_name,
                             'gradient_{}.pb'.format(i)) for i in range(2)]
            assert all(os.path.isfile(p) for p in actual_gradients)

            def load_tensor(path):
                tensor = onnx.load_tensor(path)
                return onnx.numpy_helper.to_array(tensor)

            for e_path, a_path in zip(expected_gradients, actual_gradients):
                expected = load_tensor(e_path)
                actual = load_tensor(a_path)
                np.testing.assert_allclose(expected, actual)

        self.expect(
            model, x, output_grad=True,
            custom_model_test_func=gradient_check)


class TestReplaceFuncBackward(ONNXModelTest):

    def _test_replace_func(self, fn, xs, set_grad=False):
        # Compare outputs and gradients of ``fn`` against its
        # ``as_funcnode``-wrapped counterpart.
        def make_list(v):
            if isinstance(v, (list, tuple)):
                return list(v)
            else:
                return [v]

        xvs = [x for x in xs if isinstance(x, chainer.Variable)]
        rfn = as_funcnode('fn')(fn)
        eys = make_list(fn(*xs))
        egxs = chainer.grad(eys, xvs, set_grad=set_grad)
        ays = make_list(rfn(*xs))
        agxs = chainer.grad(ays, xvs, set_grad=set_grad)

        assert len(eys) == len(ays)
        for ay, ey in zip(ays, eys):
            np.testing.assert_allclose(ay.array, ey.array)

        assert len(egxs) == len(agxs)
        for agx, egx in zip(agxs, egxs):
            if egx is None:
                # FIX: this previously asserted ``egx is None`` inside the
                # branch that already knows it, a tautology; the intent is
                # that the actual gradient is also absent.
                assert agx is None
            else:
                np.testing.assert_allclose(agx.array, egx.array)

    def test_backward_simple(self):
        self._test_replace_func(lambda a, b: a * b,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_partially_differentiable(self):
        self._test_replace_func(lambda a, b: a * b.array,
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2))])

    def test_backward_multi_outputs(self):
        self._test_replace_func(lambda a, b, c: (a * b, a / b, a * b * c),
                                [chainer.Variable(np.array(2.3)),
                                 chainer.Variable(np.array(4.2)),
                                 5])

    def test_backward_no_side_effect(self):
        a = chainer.Variable(np.array(2.3))
        b = chainer.Variable(np.array(4.2))
        x0 = a * b
        x1 = chainer.Variable(np.array(3.7))

        self._test_replace_func(lambda a, b: a * b, [x0, x1])
        # No side-effect to `grad`.
        assert x0.grad is None
        assert x1.grad is None
        assert a.grad is None
        assert b.grad is None

        # Gradient computation must stop at `x0` and `x1`.
        self._test_replace_func(lambda a, b: a * b, [x0, x1], set_grad=True)
        assert x0.grad is not None
        assert x1.grad is not None
        assert a.grad is None
        assert b.grad is None


@testing.parameterize(
    {'func_kind': 'list', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'list_kwargs', 'in_shape': (2, 3, 4), 'op_type': 'Add'},
    {'func_kind': 'var_with_deco', 'in_shape': (3, 4),
     'op_type': 'AddConstant'},
    {'func_kind': 'var_kwargs', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
    {'func_kind': 'var', 'in_shape': (3, 4), 'op_type': 'AddConstant'},
)
class TestReplaceFunc(ONNXModelTest):

    def get_model(self, target_func, input_converter):
        class Model(chainer.Chain):
            def __init__(self, target_func, input_converter):
                super().__init__()
                self.input_converter = input_converter
                self.fn = target_func

            def __call__(self, xs):
                args, kwargs = self.input_converter(xs)
                h = self.fn(*args, **kwargs)
                return F.sigmoid(h)
        return Model(target_func, input_converter)

    def test_output(self):
        attr = None
        is_deco = False
        if self.func_kind == 'list':
            def input_converter(xs):
                return ([xs[0], xs[1]],), {}

            def target_func(xs):
                return xs[0].array + xs[1].array
        elif self.func_kind == 'list_kwargs':
            def input_converter(xs):
                return (), {'xs': [xs[0], xs[1]]}

            def target_func(xs=None):
                assert xs is not None
                return xs[0].array + xs[1].array
        elif self.func_kind == 'var_with_deco':
            def input_converter(xs):
                return (xs,), {}

            @as_funcnode('AddConstant', rename_attributes=[('b', 'value')])
            def target_func(x, b=0.01):
                return x.array + b
            is_deco = True
        elif self.func_kind == 'var_kwargs':
            def input_converter(xs):
                return (), {'x': xs, 'value': 0.02}

            def target_func(x=None, value=0.01):
                assert x is not None
                return x.array + value
        else:
            assert self.func_kind == 'var'

            def input_converter(xs):
                return (xs, 0.01), {}

            def target_func(x, value):
                return x.array + value
            attr = [(1, 'value')]

        model = self.get_model(target_func, input_converter)
        x = input_generator.increasing(*self.in_shape)

        if not is_deco:
            model.fn = fake_as_funcnode(
                model.fn, self.op_type, rename_attributes=attr)

        name = 'replace_func_' + self.func_kind
        self.expect(model, x, name=name)


@pytest.mark.parametrize('return_type', ['list', 'dict'])
def test_replace_func_collection_return(tmpdir, return_type):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self, return_type):
            super().__init__()
            self.return_type = return_type

        def tiled_array(self, xs, n=5):
            if self.return_type == 'list':
                return [xs.array * i for i in range(1, 1 + n)]
            else:
                assert self.return_type == 'dict'
                return {str(i): xs.array * i for i in range(1, 1 + n)}

        def __call__(self, xs):
            return self.tiled_array(xs)

    model = Model(return_type)
    x = input_generator.increasing(1, 5)

    with warnings.catch_warnings(record=True):
        model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray')

    def tiled_array_converter(params):
        return onnx_helper.make_node(
            'xTiledArray', params.input_names, params.output_names),

    addon_converters = {'xTiledArray': tiled_array_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 1
    assert node_names[0] == 'xTiledArray_0'
    output_names = [n.name for n in onnx_model.graph.output]
    assert len(output_names) == 5
    for i, name in enumerate(output_names):
        assert name == 'xTiledArray_0_{:d}'.format(i)


def test_fake_as_funcnode_keep_structure(tmpdir):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def f(self, x):
            return {'a': (x, x + 1), 'b': [x + 2, x + 3, x + 4]}

        def __call__(self, x):
            ret = self.f(x)
            return ret['a'][0] + ret['b'][1]

    model = Model()
    x = input_generator.increasing(2, 3)

    with warnings.catch_warnings(record=True):
        model.f = fake_as_funcnode(model.f, 'xF')

    def f_converter(params):
        return onnx_helper.make_node(
            'xF', params.input_names, params.output_names),

    addon_converters = {'xF': f_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 2
    assert node_names[0] == 'xF_0'
    assert len(onnx_model.graph.node[0].output) == 5
    assert len(onnx_model.graph.output) == 1
pfnet/chainer
tests/onnx_chainer_tests/test_replace_func.py
chainer/initializers/sampling.py
import collections import os import sys import time import numpy from chainer.backends import cuda from chainer import link_hook # Select the best-resolution timer function try: _get_time = time.perf_counter except AttributeError: if os.name == 'nt': _get_time = time.clock else: _get_time = time.time class TimerHook(link_hook.LinkHook): """Link hook for measuring elapsed time of \ :meth:`Link.forward() <chainer.Link.forward>`. Example: Code example:: from chainer.link_hooks import TimerHook hook = TimerHook() with hook: trainer.run() hook.print_report() Output example:: LinkName ElapsedTime Occurrence Linear 41.42sec 2100 MLP 42.09sec 700 Classifier 42.39sec 700 where *LinkName* is the name of link that calls the hook, and *ElapsedTime* is the elapsed time the link consumed, and *Occurrence* is the number of calls. Warning: Call graph of links are hierarchical. That means reported elapsed times may be overlapping with each other and the sum may exceed the total time. Attributes: call_history: List of measurement results. It consists of pairs of the name of the link that calls this hook and the elapsed time the :meth:`forward` method of link consumes. 
""" name = 'TimerHook' table = {'sec': 1, 'ms': 10 ** 3, 'us': 10 ** 6, 'ns': 10 ** 9} def __init__(self): self.call_history = [] self._running_stack = [] self._depth = 0 self._total_time = 0 def _preprocess(self): if self.xp is numpy: start = _get_time() self._running_stack.append(start) else: assert self.xp is cuda.cupy start = cuda.Event() stop = cuda.Event() start.record() self._running_stack.append((start, stop)) self._depth += 1 def forward_preprocess(self, args): self.xp = args.link.xp self._preprocess() def _postprocess(self, link): if self.xp is numpy: start = self._running_stack.pop() stop = _get_time() elapsed_time = stop - start else: assert self.xp is cuda.cupy start, stop = self._running_stack.pop() stop.record() stop.synchronize() # Note that `get_elapsed_time` returns result in milliseconds elapsed_time = cuda.cupy.cuda.get_elapsed_time( start, stop) / 1000 self.call_history.append((link.__class__.__name__, elapsed_time)) assert self._depth > 0 self._depth -= 1 if self._depth == 0: self._total_time += elapsed_time def forward_postprocess(self, args): link = args.link assert link.xp == self.xp self._postprocess(link) def total_time(self): """Returns total elapsed time in seconds.""" return self._total_time def summary(self): """Returns a summary of time profiling in links. Returns: A summarized dictionary whose keys are link names and values are dictionaries of `elapsed_time` and `occurrence`. 
""" summary = collections.OrderedDict() for link_name, elapsed_time in self.call_history: if link_name not in summary: summary[link_name] = {'elapsed_time': 0, 'occurrence': 0} record = summary[link_name] record['elapsed_time'] += elapsed_time record['occurrence'] += 1 return summary def _choose_unit(self, second): """Choose optimal unit.""" factor = 1 for unit in ['sec', 'ms', 'us']: if second * factor >= 1: return factor, unit factor *= 1000.0 return factor, 'ns' def print_report(self, unit='auto', file=sys.stdout): """Prints a summary report of time profiling in links. Args: unit (str): Supplementary units used for computational times. `sec`, `ms`, `us`, `ns`, `auto`(default) and `auto_foreach` are supported. If `auto`, units of times are aligned to the largest, and if `auto_foreach`, units of times are adjusted for each element. """ entries = [['LinkName', 'ElapsedTime', 'Occurrence']] auto_foreach = (unit == 'auto_foreach') if unit == 'auto': max_time = max( record['elapsed_time'] for record in self.summary().values()) factor, unit = self._choose_unit(max_time) elif not auto_foreach: factor = self.table[unit] for link_name, record in self.summary().items(): second = record['elapsed_time'] if auto_foreach: factor, unit = self._choose_unit(second) elapsed_time = '%3.2f%s' % (second * factor, unit) occurrence = str(record['occurrence']) entries.append([link_name, elapsed_time, occurrence]) entry_widths = [] entry_widths.append(max(len(f) for f, _, _ in entries)) entry_widths.append(max(len(e) for _, e, _ in entries)) entry_widths.append(max(len(o) for _, _, o in entries)) template = ' '.join('{:>%d}' % w for w in entry_widths) for link_name, elapsed_time, occurrence in entries: line = template.format(link_name, elapsed_time, occurrence) file.write(line) file.write('\n') file.flush() # TODO(crcrpar): Support backward pre/post process. # See https://github.com/chainer/chainer/issues/5197
import os import warnings import chainer import chainer.functions as F import chainer.links as L from chainer import testing import numpy as np import onnx import pytest from onnx_chainer import export from onnx_chainer import export_testcase from onnx_chainer import onnx_helper from onnx_chainer.replace_func import as_funcnode from onnx_chainer.replace_func import fake_as_funcnode from onnx_chainer.testing import input_generator from onnx_chainer_tests.helper import ONNXModelChecker from onnx_chainer_tests.helper import ONNXModelTest def test_fake_as_funcnode_without_replace(): class Model(chainer.Chain): def _init__(self): super().__init__() def add(self, xs, value=0.01): return xs.array + value def __call__(self, xs): return F.sigmoid(self.add(xs)) model = Model() x = input_generator.increasing(3, 4) onnx_model = export(model, x) sigmoid_nodes = [ node for node in onnx_model.graph.node if node.op_type == 'Sigmoid'] assert len(sigmoid_nodes) == 1 # sigmoid node should be expected to connect with input # but the connection is cut because `add` method takes array. assert not sigmoid_nodes[0].input[0] == 'Input_0' class TestReplaceNumpyFullToConstantOfShape(ONNXModelTest): # This test case is a real-world example, to handle np.full def test_output(self): class Model(chainer.Chain): def __init__(self, value): super().__init__() self.value = value @as_funcnode('NumpyFull') def full(self, xs, value=0): # not support `def full(self, xs_shape, value=0)` # wrapped function node cannot handle shape directly yet. 
return np.full(xs.array.shape, value, dtype=np.float32) def __call__(self, xs): return F.sigmoid(self.full(xs, value=self.value)) model = Model(value=5) x = input_generator.increasing(2, 3, 4) def numpy_full_converter(params): gb = onnx_helper.GraphBuilder() output = gb.op('Shape', params.input_names) value = onnx.helper.make_tensor( 'value', onnx.TensorProto.FLOAT, [1], [params.func.value]) gb.op_output_named( 'ConstantOfShape', [output], params.output_names, value=value) return gb.nodes() addon_converters = {'NumpyFull': numpy_full_converter} self.expect( model, x, skip_opset_version=[7, 8], external_converters=addon_converters) class TestReplaceWithOutputGrad(ONNXModelChecker): def get_model(self): class Model(chainer.Chain): def __init__(self): super().__init__() with self.init_scope(): self.l = L.Linear(None, 2) def half(self, xs, value=0.5): return xs * value def forward(self, xs): h = self.l(xs) h = self.half(h) return F.sum(chainer.as_variable(h)) return Model() def test_grad_error(self): model = self.get_model() # this alternative function does not return chainer.Variable # backward propagation will fail model.half = fake_as_funcnode( lambda xs, value=0.5: xs.array * value, 'MulConstant') x = input_generator.increasing(2, 5) with pytest.raises(ValueError): self.expect(model, x, output_grad=True) def test_output(self, tmpdir): # first, make expected gradients to temp directory expected_result_path = str(tmpdir) model = self.get_model() x = input_generator.increasing(2, 5) export_testcase(model, x, expected_result_path, output_grad=True) data_set_name = 'test_data_set_0' expected_gradients = [os.path.join( expected_result_path, data_set_name, 'gradient_{}.pb').format(i) for i in range(2)] assert all([os.path.isfile(path) for path in expected_gradients]) # model.half returns chainer.Variable and enabled backward # regardless using replacing model.half = fake_as_funcnode(model.half, 'MulConstant') x = input_generator.increasing(2, 5) def gradient_check(model, 
path): actual_gradients = [os.path.join( path, data_set_name, 'gradient_{}.pb').format(i) for i in range(2)] assert all([os.path.isfile(path) for path in actual_gradients]) def load_tensor(path): tensor = onnx.load_tensor(path) return onnx.numpy_helper.to_array(tensor) for e_path, a_path in zip(expected_gradients, actual_gradients): expected = load_tensor(e_path) actual = load_tensor(a_path) np.testing.assert_allclose(expected, actual) self.expect( model, x, output_grad=True, custom_model_test_func=gradient_check) class TestReplaceFuncBackward(ONNXModelTest): def _test_replace_func(self, fn, xs, set_grad=False): def make_list(v): if isinstance(v, (list, tuple)): return list(v) else: return [v] xvs = [x for x in xs if isinstance(x, chainer.Variable)] rfn = as_funcnode('fn')(fn) eys = make_list(fn(*xs)) egxs = chainer.grad(eys, xvs, set_grad=set_grad) ays = make_list(rfn(*xs)) agxs = chainer.grad(ays, xvs, set_grad=set_grad) assert len(eys) == len(ays) for ay, ey in zip(ays, eys): np.testing.assert_allclose(ay.array, ey.array) assert len(egxs) == len(agxs) for agx, egx in zip(agxs, egxs): if egx is None: assert egx is None else: np.testing.assert_allclose(agx.array, egx.array) def test_backward_simple(self): self._test_replace_func(lambda a, b: a * b, [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2))]) def test_backward_partially_differentiable(self): self._test_replace_func(lambda a, b: a * b.array, [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2))]) def test_backward_multi_outputs(self): self._test_replace_func(lambda a, b, c: (a * b, a / b, a * b * c), [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2)), 5]) def test_backward_no_side_effect(self): a = chainer.Variable(np.array(2.3)) b = chainer.Variable(np.array(4.2)) x0 = a * b x1 = chainer.Variable(np.array(3.7)) self._test_replace_func(lambda a, b: a * b, [x0, x1]) # No side-effect to `grad`. 
assert x0.grad is None assert x1.grad is None assert a.grad is None assert b.grad is None # Gradient computation must stop at `x0` and `x1`. self._test_replace_func(lambda a, b: a * b, [x0, x1], set_grad=True) assert x0.grad is not None assert x1.grad is not None assert a.grad is None assert b.grad is None @testing.parameterize( {'func_kind': 'list', 'in_shape': (2, 3, 4), 'op_type': 'Add'}, {'func_kind': 'list_kwargs', 'in_shape': (2, 3, 4), 'op_type': 'Add'}, {'func_kind': 'var_with_deco', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, {'func_kind': 'var_kwargs', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, {'func_kind': 'var', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, ) class TestReplaceFunc(ONNXModelTest): def get_model(self, target_func, input_converter): class Model(chainer.Chain): def __init__(self, target_func, input_converter): super().__init__() self.input_converter = input_converter self.fn = target_func def __call__(self, xs): args, kwargs = self.input_converter(xs) h = self.fn(*args, **kwargs) return F.sigmoid(h) return Model(target_func, input_converter) def test_output(self): attr = None is_deco = False if self.func_kind == 'list': def input_converter(xs): return ([xs[0], xs[1]],), {} def target_func(xs): return xs[0].array + xs[1].array elif self.func_kind == 'list_kwargs': def input_converter(xs): return (), {'xs': [xs[0], xs[1]]} def target_func(xs=None): assert xs is not None return xs[0].array + xs[1].array elif self.func_kind == 'var_with_deco': def input_converter(xs): return (xs,), {} @as_funcnode('AddConstant', rename_attributes=[('b', 'value')]) def target_func(x, b=0.01): return x.array + b is_deco = True elif self.func_kind == 'var_kwargs': def input_converter(xs): return (), {'x': xs, 'value': 0.02} def target_func(x=None, value=0.01): assert x is not None return x.array + value else: assert self.func_kind == 'var' def input_converter(xs): return (xs, 0.01), {} def target_func(x, value): return x.array + value attr = [(1, 
'value')] model = self.get_model(target_func, input_converter) x = input_generator.increasing(*self.in_shape) if not is_deco: model.fn = fake_as_funcnode( model.fn, self.op_type, rename_attributes=attr) name = 'replace_func_' + self.func_kind self.expect(model, x, name=name) @pytest.mark.parametrize('return_type', ['list', 'dict']) def test_replace_func_collection_return(tmpdir, return_type): path = str(tmpdir) class Model(chainer.Chain): def __init__(self, return_type): super().__init__() self.return_type = return_type def tiled_array(self, xs, n=5): if self.return_type == 'list': return [xs.array * i for i in range(1, 1+n)] else: assert self.return_type == 'dict' return {str(i): xs.array * i for i in range(1, 1+n)} def __call__(self, xs): return self.tiled_array(xs) model = Model(return_type) x = input_generator.increasing(1, 5) with warnings.catch_warnings(record=True): model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray') def tiled_array_converter(params): return onnx_helper.make_node( 'xTiledArray', params.input_names, params.output_names), addon_converters = {'xTiledArray': tiled_array_converter} with testing.assert_warns(UserWarning): export_testcase(model, x, path, external_converters=addon_converters) model_filepath = os.path.join(path, 'model.onnx') assert os.path.isfile(model_filepath) onnx_model = onnx.load(model_filepath) node_names = [n.name for n in onnx_model.graph.node] assert len(node_names) == 1 assert node_names[0] == 'xTiledArray_0' output_names = [n.name for n in onnx_model.graph.output] assert len(output_names) == 5 for i, name in enumerate(output_names): assert name == 'xTiledArray_0_{:d}'.format(i) def test_fake_as_funcnode_keep_structure(tmpdir): path = str(tmpdir) class Model(chainer.Chain): def __init__(self): super().__init__() def f(self, x): return {'a': (x, x+1), 'b': [x+2, x+3, x+4]} def __call__(self, x): ret = self.f(x) return ret['a'][0] + ret['b'][1] model = Model() x = input_generator.increasing(2, 3) with 
warnings.catch_warnings(record=True): model.f = fake_as_funcnode(model.f, 'xF') def f_converter(params): return onnx_helper.make_node( 'xF', params.input_names, params.output_names), addon_converters = {'xF': f_converter} with testing.assert_warns(UserWarning): export_testcase(model, x, path, external_converters=addon_converters) model_filepath = os.path.join(path, 'model.onnx') assert os.path.isfile(model_filepath) onnx_model = onnx.load(model_filepath) node_names = [n.name for n in onnx_model.graph.node] assert len(node_names) == 2 assert node_names[0] == 'xF_0' assert len(onnx_model.graph.node[0].output) == 5 assert len(onnx_model.graph.output) == 1
pfnet/chainer
tests/onnx_chainer_tests/test_replace_func.py
chainer/link_hooks/timer.py
import math import warnings import numpy import chainer from chainer.backends import cuda from chainer import function_node from chainer.functions.math import exponential from chainer import utils from chainer.utils import type_check _ndtr_cpu = None def _slow_ndtr_cpu(x): return 0.5 * math.erfc(-x / 2 ** 0.5) class Ndtr(function_node.FunctionNode): @property def label(self): return 'ndtr' def check_type_forward(self, in_types): type_check.expect(in_types.size() == 1) type_check.expect(in_types[0].dtype.kind == 'f') def forward_cpu(self, x): global _ndtr_cpu if _ndtr_cpu is None: try: from scipy import special _ndtr_cpu = special.ndtr except ImportError: warnings.warn( 'SciPy is not available. Forward computation of ndtr in' ' CPU can be slow without SciPy.', chainer.warnings.PerformanceWarning) _ndtr_cpu = numpy.vectorize(_slow_ndtr_cpu) self.retain_inputs((0,)) return utils.force_array(_ndtr_cpu(x[0]), dtype=x[0].dtype), def forward_gpu(self, x): self.retain_inputs((0,)) return cuda.elementwise( 'T x', 'T y', 'y = normcdf(x)', 'elementwise_ndtr', )(x[0]), def backward(self, indexes, gy): x = self.get_retained_inputs()[0] return (2 * numpy.pi) ** -0.5 * exponential.exp(-0.5 * x ** 2) * gy[0], def ndtr(x): """Elementwise cumulative distribution function of normal distribution. .. note:: Forward computation in CPU can be slow if `SciPy <https://www.scipy.org/>`_ is not available. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Ndtr().apply((x,))[0]
import os import warnings import chainer import chainer.functions as F import chainer.links as L from chainer import testing import numpy as np import onnx import pytest from onnx_chainer import export from onnx_chainer import export_testcase from onnx_chainer import onnx_helper from onnx_chainer.replace_func import as_funcnode from onnx_chainer.replace_func import fake_as_funcnode from onnx_chainer.testing import input_generator from onnx_chainer_tests.helper import ONNXModelChecker from onnx_chainer_tests.helper import ONNXModelTest def test_fake_as_funcnode_without_replace(): class Model(chainer.Chain): def _init__(self): super().__init__() def add(self, xs, value=0.01): return xs.array + value def __call__(self, xs): return F.sigmoid(self.add(xs)) model = Model() x = input_generator.increasing(3, 4) onnx_model = export(model, x) sigmoid_nodes = [ node for node in onnx_model.graph.node if node.op_type == 'Sigmoid'] assert len(sigmoid_nodes) == 1 # sigmoid node should be expected to connect with input # but the connection is cut because `add` method takes array. assert not sigmoid_nodes[0].input[0] == 'Input_0' class TestReplaceNumpyFullToConstantOfShape(ONNXModelTest): # This test case is a real-world example, to handle np.full def test_output(self): class Model(chainer.Chain): def __init__(self, value): super().__init__() self.value = value @as_funcnode('NumpyFull') def full(self, xs, value=0): # not support `def full(self, xs_shape, value=0)` # wrapped function node cannot handle shape directly yet. 
return np.full(xs.array.shape, value, dtype=np.float32) def __call__(self, xs): return F.sigmoid(self.full(xs, value=self.value)) model = Model(value=5) x = input_generator.increasing(2, 3, 4) def numpy_full_converter(params): gb = onnx_helper.GraphBuilder() output = gb.op('Shape', params.input_names) value = onnx.helper.make_tensor( 'value', onnx.TensorProto.FLOAT, [1], [params.func.value]) gb.op_output_named( 'ConstantOfShape', [output], params.output_names, value=value) return gb.nodes() addon_converters = {'NumpyFull': numpy_full_converter} self.expect( model, x, skip_opset_version=[7, 8], external_converters=addon_converters) class TestReplaceWithOutputGrad(ONNXModelChecker): def get_model(self): class Model(chainer.Chain): def __init__(self): super().__init__() with self.init_scope(): self.l = L.Linear(None, 2) def half(self, xs, value=0.5): return xs * value def forward(self, xs): h = self.l(xs) h = self.half(h) return F.sum(chainer.as_variable(h)) return Model() def test_grad_error(self): model = self.get_model() # this alternative function does not return chainer.Variable # backward propagation will fail model.half = fake_as_funcnode( lambda xs, value=0.5: xs.array * value, 'MulConstant') x = input_generator.increasing(2, 5) with pytest.raises(ValueError): self.expect(model, x, output_grad=True) def test_output(self, tmpdir): # first, make expected gradients to temp directory expected_result_path = str(tmpdir) model = self.get_model() x = input_generator.increasing(2, 5) export_testcase(model, x, expected_result_path, output_grad=True) data_set_name = 'test_data_set_0' expected_gradients = [os.path.join( expected_result_path, data_set_name, 'gradient_{}.pb').format(i) for i in range(2)] assert all([os.path.isfile(path) for path in expected_gradients]) # model.half returns chainer.Variable and enabled backward # regardless using replacing model.half = fake_as_funcnode(model.half, 'MulConstant') x = input_generator.increasing(2, 5) def gradient_check(model, 
path): actual_gradients = [os.path.join( path, data_set_name, 'gradient_{}.pb').format(i) for i in range(2)] assert all([os.path.isfile(path) for path in actual_gradients]) def load_tensor(path): tensor = onnx.load_tensor(path) return onnx.numpy_helper.to_array(tensor) for e_path, a_path in zip(expected_gradients, actual_gradients): expected = load_tensor(e_path) actual = load_tensor(a_path) np.testing.assert_allclose(expected, actual) self.expect( model, x, output_grad=True, custom_model_test_func=gradient_check) class TestReplaceFuncBackward(ONNXModelTest): def _test_replace_func(self, fn, xs, set_grad=False): def make_list(v): if isinstance(v, (list, tuple)): return list(v) else: return [v] xvs = [x for x in xs if isinstance(x, chainer.Variable)] rfn = as_funcnode('fn')(fn) eys = make_list(fn(*xs)) egxs = chainer.grad(eys, xvs, set_grad=set_grad) ays = make_list(rfn(*xs)) agxs = chainer.grad(ays, xvs, set_grad=set_grad) assert len(eys) == len(ays) for ay, ey in zip(ays, eys): np.testing.assert_allclose(ay.array, ey.array) assert len(egxs) == len(agxs) for agx, egx in zip(agxs, egxs): if egx is None: assert egx is None else: np.testing.assert_allclose(agx.array, egx.array) def test_backward_simple(self): self._test_replace_func(lambda a, b: a * b, [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2))]) def test_backward_partially_differentiable(self): self._test_replace_func(lambda a, b: a * b.array, [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2))]) def test_backward_multi_outputs(self): self._test_replace_func(lambda a, b, c: (a * b, a / b, a * b * c), [chainer.Variable(np.array(2.3)), chainer.Variable(np.array(4.2)), 5]) def test_backward_no_side_effect(self): a = chainer.Variable(np.array(2.3)) b = chainer.Variable(np.array(4.2)) x0 = a * b x1 = chainer.Variable(np.array(3.7)) self._test_replace_func(lambda a, b: a * b, [x0, x1]) # No side-effect to `grad`. 
assert x0.grad is None assert x1.grad is None assert a.grad is None assert b.grad is None # Gradient computation must stop at `x0` and `x1`. self._test_replace_func(lambda a, b: a * b, [x0, x1], set_grad=True) assert x0.grad is not None assert x1.grad is not None assert a.grad is None assert b.grad is None @testing.parameterize( {'func_kind': 'list', 'in_shape': (2, 3, 4), 'op_type': 'Add'}, {'func_kind': 'list_kwargs', 'in_shape': (2, 3, 4), 'op_type': 'Add'}, {'func_kind': 'var_with_deco', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, {'func_kind': 'var_kwargs', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, {'func_kind': 'var', 'in_shape': (3, 4), 'op_type': 'AddConstant'}, ) class TestReplaceFunc(ONNXModelTest): def get_model(self, target_func, input_converter): class Model(chainer.Chain): def __init__(self, target_func, input_converter): super().__init__() self.input_converter = input_converter self.fn = target_func def __call__(self, xs): args, kwargs = self.input_converter(xs) h = self.fn(*args, **kwargs) return F.sigmoid(h) return Model(target_func, input_converter) def test_output(self): attr = None is_deco = False if self.func_kind == 'list': def input_converter(xs): return ([xs[0], xs[1]],), {} def target_func(xs): return xs[0].array + xs[1].array elif self.func_kind == 'list_kwargs': def input_converter(xs): return (), {'xs': [xs[0], xs[1]]} def target_func(xs=None): assert xs is not None return xs[0].array + xs[1].array elif self.func_kind == 'var_with_deco': def input_converter(xs): return (xs,), {} @as_funcnode('AddConstant', rename_attributes=[('b', 'value')]) def target_func(x, b=0.01): return x.array + b is_deco = True elif self.func_kind == 'var_kwargs': def input_converter(xs): return (), {'x': xs, 'value': 0.02} def target_func(x=None, value=0.01): assert x is not None return x.array + value else: assert self.func_kind == 'var' def input_converter(xs): return (xs, 0.01), {} def target_func(x, value): return x.array + value attr = [(1, 
'value')] model = self.get_model(target_func, input_converter) x = input_generator.increasing(*self.in_shape) if not is_deco: model.fn = fake_as_funcnode( model.fn, self.op_type, rename_attributes=attr) name = 'replace_func_' + self.func_kind self.expect(model, x, name=name) @pytest.mark.parametrize('return_type', ['list', 'dict']) def test_replace_func_collection_return(tmpdir, return_type): path = str(tmpdir) class Model(chainer.Chain): def __init__(self, return_type): super().__init__() self.return_type = return_type def tiled_array(self, xs, n=5): if self.return_type == 'list': return [xs.array * i for i in range(1, 1+n)] else: assert self.return_type == 'dict' return {str(i): xs.array * i for i in range(1, 1+n)} def __call__(self, xs): return self.tiled_array(xs) model = Model(return_type) x = input_generator.increasing(1, 5) with warnings.catch_warnings(record=True): model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray') def tiled_array_converter(params): return onnx_helper.make_node( 'xTiledArray', params.input_names, params.output_names), addon_converters = {'xTiledArray': tiled_array_converter} with testing.assert_warns(UserWarning): export_testcase(model, x, path, external_converters=addon_converters) model_filepath = os.path.join(path, 'model.onnx') assert os.path.isfile(model_filepath) onnx_model = onnx.load(model_filepath) node_names = [n.name for n in onnx_model.graph.node] assert len(node_names) == 1 assert node_names[0] == 'xTiledArray_0' output_names = [n.name for n in onnx_model.graph.output] assert len(output_names) == 5 for i, name in enumerate(output_names): assert name == 'xTiledArray_0_{:d}'.format(i) def test_fake_as_funcnode_keep_structure(tmpdir): path = str(tmpdir) class Model(chainer.Chain): def __init__(self): super().__init__() def f(self, x): return {'a': (x, x+1), 'b': [x+2, x+3, x+4]} def __call__(self, x): ret = self.f(x) return ret['a'][0] + ret['b'][1] model = Model() x = input_generator.increasing(2, 3) with 
warnings.catch_warnings(record=True): model.f = fake_as_funcnode(model.f, 'xF') def f_converter(params): return onnx_helper.make_node( 'xF', params.input_names, params.output_names), addon_converters = {'xF': f_converter} with testing.assert_warns(UserWarning): export_testcase(model, x, path, external_converters=addon_converters) model_filepath = os.path.join(path, 'model.onnx') assert os.path.isfile(model_filepath) onnx_model = onnx.load(model_filepath) node_names = [n.name for n in onnx_model.graph.node] assert len(node_names) == 2 assert node_names[0] == 'xF_0' assert len(onnx_model.graph.node[0].output) == 5 assert len(onnx_model.graph.output) == 1
pfnet/chainer
tests/onnx_chainer_tests/test_replace_func.py
chainer/functions/math/ndtr.py
"""Support for SleepIQ sensors.""" from homeassistant.components import sleepiq ICON = "mdi:hotel" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the SleepIQ sensors.""" if discovery_info is None: return data = sleepiq.DATA data.update() dev = list() for bed_id, bed in data.beds.items(): for side in sleepiq.SIDES: if getattr(bed, side) is not None: dev.append(SleepNumberSensor(data, bed_id, side)) add_entities(dev) class SleepNumberSensor(sleepiq.SleepIQSensor): """Implementation of a SleepIQ sensor.""" def __init__(self, sleepiq_data, bed_id, side): """Initialize the sensor.""" sleepiq.SleepIQSensor.__init__(self, sleepiq_data, bed_id, side) self._state = None self.type = sleepiq.SLEEP_NUMBER self._name = sleepiq.SENSOR_TYPES[self.type] self.update() @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return ICON def update(self): """Get the latest data from SleepIQ and updates the states.""" sleepiq.SleepIQSensor.update(self) self._state = self.side.sleep_number
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/sleepiq/sensor.py
"""Real-time information about public transport departures in Norway.""" from datetime import datetime, timedelta import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_SHOW_ON_MAP, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) API_CLIENT_NAME = "homeassistant-homeassistant" ATTRIBUTION = "Data provided by entur.org under NLOD" CONF_STOP_IDS = "stop_ids" CONF_EXPAND_PLATFORMS = "expand_platforms" CONF_WHITELIST_LINES = "line_whitelist" CONF_OMIT_NON_BOARDING = "omit_non_boarding" CONF_NUMBER_OF_DEPARTURES = "number_of_departures" DEFAULT_NAME = "Entur" DEFAULT_ICON_KEY = "bus" ICONS = { "air": "mdi:airplane", "bus": "mdi:bus", "metro": "mdi:subway", "rail": "mdi:train", "tram": "mdi:tram", "water": "mdi:ferry", } SCAN_INTERVAL = timedelta(seconds=45) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STOP_IDS): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_EXPAND_PLATFORMS, default=True): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean, vol.Optional(CONF_WHITELIST_LINES, default=[]): cv.ensure_list, vol.Optional(CONF_OMIT_NON_BOARDING, default=True): cv.boolean, vol.Optional(CONF_NUMBER_OF_DEPARTURES, default=2): vol.All( cv.positive_int, vol.Range(min=2, max=10) ), } ) ATTR_STOP_ID = "stop_id" ATTR_ROUTE = "route" ATTR_ROUTE_ID = "route_id" ATTR_EXPECTED_AT = "due_at" ATTR_DELAY = "delay" ATTR_REALTIME = "real_time" ATTR_NEXT_UP_IN = "next_due_in" ATTR_NEXT_UP_ROUTE = "next_route" ATTR_NEXT_UP_ROUTE_ID = "next_route_id" ATTR_NEXT_UP_AT = "next_due_at" ATTR_NEXT_UP_DELAY = 
"next_delay" ATTR_NEXT_UP_REALTIME = "next_real_time" ATTR_TRANSPORT_MODE = "transport_mode" def due_in_minutes(timestamp: datetime) -> int: """Get the time in minutes from a timestamp.""" if timestamp is None: return None diff = timestamp - dt_util.now() return int(diff.total_seconds() / 60) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Entur public transport sensor.""" from enturclient import EnturPublicTransportData expand = config.get(CONF_EXPAND_PLATFORMS) line_whitelist = config.get(CONF_WHITELIST_LINES) name = config.get(CONF_NAME) show_on_map = config.get(CONF_SHOW_ON_MAP) stop_ids = config.get(CONF_STOP_IDS) omit_non_boarding = config.get(CONF_OMIT_NON_BOARDING) number_of_departures = config.get(CONF_NUMBER_OF_DEPARTURES) stops = [s for s in stop_ids if "StopPlace" in s] quays = [s for s in stop_ids if "Quay" in s] data = EnturPublicTransportData( API_CLIENT_NAME, stops=stops, quays=quays, line_whitelist=line_whitelist, omit_non_boarding=omit_non_boarding, number_of_departures=number_of_departures, web_session=async_get_clientsession(hass), ) if expand: await data.expand_all_quays() await data.update() proxy = EnturProxy(data) entities = [] for place in data.all_stop_places_quays(): try: given_name = "{} {}".format(name, data.get_stop_info(place).name) except KeyError: given_name = f"{name} {place}" entities.append( EnturPublicTransportSensor(proxy, given_name, place, show_on_map) ) async_add_entities(entities, True) class EnturProxy: """Proxy for the Entur client. Ensure throttle to not hit rate limiting on the API. 
""" def __init__(self, api): """Initialize the proxy.""" self._api = api @Throttle(timedelta(seconds=15)) async def async_update(self) -> None: """Update data in client.""" await self._api.update() def get_stop_info(self, stop_id: str) -> dict: """Get info about specific stop place.""" return self._api.get_stop_info(stop_id) class EnturPublicTransportSensor(Entity): """Implementation of a Entur public transport sensor.""" def __init__(self, api: EnturProxy, name: str, stop: str, show_on_map: bool): """Initialize the sensor.""" self.api = api self._stop = stop self._show_on_map = show_on_map self._name = name self._state = None self._icon = ICONS[DEFAULT_ICON_KEY] self._attributes = {} @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> str: """Return the state of the sensor.""" return self._state @property def device_state_attributes(self) -> dict: """Return the state attributes.""" self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION self._attributes[ATTR_STOP_ID] = self._stop return self._attributes @property def unit_of_measurement(self) -> str: """Return the unit this state is expressed in.""" return "min" @property def icon(self) -> str: """Icon to use in the frontend.""" return self._icon async def async_update(self) -> None: """Get the latest data and update the states.""" await self.api.async_update() self._attributes = {} data = self.api.get_stop_info(self._stop) if data is None: self._state = None return if self._show_on_map and data.latitude and data.longitude: self._attributes[CONF_LATITUDE] = data.latitude self._attributes[CONF_LONGITUDE] = data.longitude calls = data.estimated_calls if not calls: self._state = None return self._state = due_in_minutes(calls[0].expected_departure_time) self._icon = ICONS.get(calls[0].transport_mode, ICONS[DEFAULT_ICON_KEY]) self._attributes[ATTR_ROUTE] = calls[0].front_display self._attributes[ATTR_ROUTE_ID] = calls[0].line_id 
self._attributes[ATTR_EXPECTED_AT] = calls[0].expected_departure_time.strftime( "%H:%M" ) self._attributes[ATTR_REALTIME] = calls[0].is_realtime self._attributes[ATTR_DELAY] = calls[0].delay_in_min number_of_calls = len(calls) if number_of_calls < 2: return self._attributes[ATTR_NEXT_UP_ROUTE] = calls[1].front_display self._attributes[ATTR_NEXT_UP_ROUTE_ID] = calls[1].line_id self._attributes[ATTR_NEXT_UP_AT] = calls[1].expected_departure_time.strftime( "%H:%M" ) self._attributes[ATTR_NEXT_UP_IN] = "{} min".format( due_in_minutes(calls[1].expected_departure_time) ) self._attributes[ATTR_NEXT_UP_REALTIME] = calls[1].is_realtime self._attributes[ATTR_NEXT_UP_DELAY] = calls[1].delay_in_min if number_of_calls < 3: return for i, call in enumerate(calls[2:]): key_name = "departure_#" + str(i + 3) self._attributes[key_name] = "{}{} {}".format( "" if bool(call.is_realtime) else "ca. ", call.expected_departure_time.strftime("%H:%M"), call.front_display, )
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/entur_public_transport/sensor.py
"""Support for ISY994 covers.""" import logging from typing import Callable from homeassistant.components.cover import DOMAIN, CoverDevice from homeassistant.const import ( STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.helpers.typing import ConfigType from . import ISY994_NODES, ISY994_PROGRAMS, ISYDevice _LOGGER = logging.getLogger(__name__) VALUE_TO_STATE = { 0: STATE_CLOSED, 101: STATE_UNKNOWN, 102: "stopped", 103: STATE_CLOSING, 104: STATE_OPENING, } def setup_platform( hass, config: ConfigType, add_entities: Callable[[list], None], discovery_info=None ): """Set up the ISY994 cover platform.""" devices = [] for node in hass.data[ISY994_NODES][DOMAIN]: devices.append(ISYCoverDevice(node)) for name, status, actions in hass.data[ISY994_PROGRAMS][DOMAIN]: devices.append(ISYCoverProgram(name, status, actions)) add_entities(devices) class ISYCoverDevice(ISYDevice, CoverDevice): """Representation of an ISY994 cover device.""" @property def current_cover_position(self) -> int: """Return the current cover position.""" if self.is_unknown() or self.value is None: return None return sorted((0, self.value, 100))[1] @property def is_closed(self) -> bool: """Get whether the ISY994 cover device is closed.""" return self.state == STATE_CLOSED @property def state(self) -> str: """Get the state of the ISY994 cover device.""" if self.is_unknown(): return None return VALUE_TO_STATE.get(self.value, STATE_OPEN) def open_cover(self, **kwargs) -> None: """Send the open cover command to the ISY994 cover device.""" if not self._node.on(val=100): _LOGGER.error("Unable to open the cover") def close_cover(self, **kwargs) -> None: """Send the close cover command to the ISY994 cover device.""" if not self._node.off(): _LOGGER.error("Unable to close the cover") class ISYCoverProgram(ISYCoverDevice): """Representation of an ISY994 cover program.""" def __init__(self, name: str, node: object, actions: object) -> None: """Initialize the ISY994 cover 
program.""" super().__init__(node) self._name = name self._actions = actions @property def state(self) -> str: """Get the state of the ISY994 cover program.""" return STATE_CLOSED if bool(self.value) else STATE_OPEN def open_cover(self, **kwargs) -> None: """Send the open cover command to the ISY994 cover program.""" if not self._actions.runThen(): _LOGGER.error("Unable to open the cover") def close_cover(self, **kwargs) -> None: """Send the close cover command to the ISY994 cover program.""" if not self._actions.runElse(): _LOGGER.error("Unable to close the cover")
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/isy994/cover.py
"""Register a custom front end panel.""" import logging import os import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.loader import bind_hass _LOGGER = logging.getLogger(__name__) DOMAIN = "panel_custom" CONF_COMPONENT_NAME = "name" CONF_SIDEBAR_TITLE = "sidebar_title" CONF_SIDEBAR_ICON = "sidebar_icon" CONF_URL_PATH = "url_path" CONF_CONFIG = "config" CONF_WEBCOMPONENT_PATH = "webcomponent_path" CONF_JS_URL = "js_url" CONF_MODULE_URL = "module_url" CONF_EMBED_IFRAME = "embed_iframe" CONF_TRUST_EXTERNAL_SCRIPT = "trust_external_script" CONF_URL_EXCLUSIVE_GROUP = "url_exclusive_group" CONF_REQUIRE_ADMIN = "require_admin" MSG_URL_CONFLICT = "Pass in only one of webcomponent_path, module_url or js_url" DEFAULT_EMBED_IFRAME = False DEFAULT_TRUST_EXTERNAL = False DEFAULT_ICON = "mdi:bookmark" LEGACY_URL = "/api/panel_custom/{}" PANEL_DIR = "panels" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.ensure_list, [ vol.Schema( { vol.Required(CONF_COMPONENT_NAME): cv.string, vol.Optional(CONF_SIDEBAR_TITLE): cv.string, vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon, vol.Optional(CONF_URL_PATH): cv.string, vol.Optional(CONF_CONFIG): dict, vol.Exclusive( CONF_WEBCOMPONENT_PATH, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT, ): cv.string, vol.Exclusive( CONF_JS_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT ): cv.string, vol.Exclusive( CONF_MODULE_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT, ): cv.string, vol.Optional( CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME ): cv.boolean, vol.Optional( CONF_TRUST_EXTERNAL_SCRIPT, default=DEFAULT_TRUST_EXTERNAL ): cv.boolean, vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, } ) ], ) }, extra=vol.ALLOW_EXTRA, ) @bind_hass async def async_register_panel( hass, # The url to serve the panel frontend_url_path, # The webcomponent name that loads your panel webcomponent_name, # Title/icon for sidebar sidebar_title=None, sidebar_icon=None, # HTML source of 
your panel html_url=None, # JS source of your panel js_url=None, # JS module of your panel module_url=None, # If your panel should be run inside an iframe embed_iframe=DEFAULT_EMBED_IFRAME, # Should user be asked for confirmation when loading external source trust_external=DEFAULT_TRUST_EXTERNAL, # Configuration to be passed to the panel config=None, # If your panel should only be shown to admin users require_admin=False, ): """Register a new custom panel.""" if js_url is None and html_url is None and module_url is None: raise ValueError("Either js_url, module_url or html_url is required.") if (js_url and html_url) or (module_url and html_url): raise ValueError("Pass in only one of JS url, Module url or HTML url.") if config is not None and not isinstance(config, dict): raise ValueError("Config needs to be a dictionary.") custom_panel_config = { "name": webcomponent_name, "embed_iframe": embed_iframe, "trust_external": trust_external, } if js_url is not None: custom_panel_config["js_url"] = js_url if module_url is not None: custom_panel_config["module_url"] = module_url if html_url is not None: custom_panel_config["html_url"] = html_url if config is not None: # Make copy because we're mutating it config = dict(config) else: config = {} config["_panel_custom"] = custom_panel_config hass.components.frontend.async_register_built_in_panel( component_name="custom", sidebar_title=sidebar_title, sidebar_icon=sidebar_icon, frontend_url_path=frontend_url_path, config=config, require_admin=require_admin, ) async def async_setup(hass, config): """Initialize custom panel.""" if DOMAIN not in config: return True success = False for panel in config[DOMAIN]: name = panel[CONF_COMPONENT_NAME] kwargs = { "webcomponent_name": panel[CONF_COMPONENT_NAME], "frontend_url_path": panel.get(CONF_URL_PATH, name), "sidebar_title": panel.get(CONF_SIDEBAR_TITLE), "sidebar_icon": panel.get(CONF_SIDEBAR_ICON), "config": panel.get(CONF_CONFIG), "trust_external": panel[CONF_TRUST_EXTERNAL_SCRIPT], 
"embed_iframe": panel[CONF_EMBED_IFRAME], "require_admin": panel[CONF_REQUIRE_ADMIN], } panel_path = panel.get(CONF_WEBCOMPONENT_PATH) if panel_path is None: panel_path = hass.config.path(PANEL_DIR, f"{name}.html") if CONF_JS_URL in panel: kwargs["js_url"] = panel[CONF_JS_URL] elif CONF_MODULE_URL in panel: kwargs["module_url"] = panel[CONF_MODULE_URL] elif not await hass.async_add_job(os.path.isfile, panel_path): _LOGGER.error("Unable to find webcomponent for %s: %s", name, panel_path) continue else: url = LEGACY_URL.format(name) hass.http.register_static_path(url, panel_path) kwargs["html_url"] = url await async_register_panel(hass, **kwargs) success = True return success
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/panel_custom/__init__.py
"""Support for Danfoss Air HRV.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import CONF_HOST from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DANFOSS_AIR_PLATFORMS = ["sensor", "binary_sensor", "switch"] DOMAIN = "danfoss_air" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA ) def setup(hass, config): """Set up the Danfoss Air component.""" conf = config[DOMAIN] hass.data[DOMAIN] = DanfossAir(conf[CONF_HOST]) for platform in DANFOSS_AIR_PLATFORMS: discovery.load_platform(hass, platform, DOMAIN, {}, config) return True class DanfossAir: """Handle all communication with Danfoss Air CCM unit.""" def __init__(self, host): """Initialize the Danfoss Air CCM connection.""" self._data = {} from pydanfossair.danfossclient import DanfossClient self._client = DanfossClient(host) def get_value(self, item): """Get value for sensor.""" return self._data.get(item) def update_state(self, command, state_command): """Send update command to Danfoss Air CCM.""" self._data[state_command] = self._client.command(command) @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Use the data from Danfoss Air API.""" _LOGGER.debug("Fetching data from Danfoss Air CCM module") from pydanfossair.commands import ReadCommand self._data[ReadCommand.exhaustTemperature] = self._client.command( ReadCommand.exhaustTemperature ) self._data[ReadCommand.outdoorTemperature] = self._client.command( ReadCommand.outdoorTemperature ) self._data[ReadCommand.supplyTemperature] = self._client.command( ReadCommand.supplyTemperature ) self._data[ReadCommand.extractTemperature] = self._client.command( ReadCommand.extractTemperature ) self._data[ReadCommand.humidity] = round( self._client.command(ReadCommand.humidity), 2 ) 
self._data[ReadCommand.filterPercent] = round( self._client.command(ReadCommand.filterPercent), 2 ) self._data[ReadCommand.bypass] = self._client.command(ReadCommand.bypass) self._data[ReadCommand.fan_step] = self._client.command(ReadCommand.fan_step) self._data[ReadCommand.supply_fan_speed] = self._client.command( ReadCommand.supply_fan_speed ) self._data[ReadCommand.exhaust_fan_speed] = self._client.command( ReadCommand.exhaust_fan_speed ) self._data[ReadCommand.away_mode] = self._client.command(ReadCommand.away_mode) self._data[ReadCommand.boost] = self._client.command(ReadCommand.boost) self._data[ReadCommand.battery_percent] = self._client.command( ReadCommand.battery_percent ) self._data[ReadCommand.bypass] = self._client.command(ReadCommand.bypass) self._data[ReadCommand.automatic_bypass] = self._client.command( ReadCommand.automatic_bypass ) _LOGGER.debug("Done fetching data from Danfoss Air CCM module")
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/danfoss_air/__init__.py
"""Config flow to configure the OpenUV component.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from .const import DOMAIN @callback def configured_instances(hass): """Return a set of configured OpenUV instances.""" return set( "{0}, {1}".format( entry.data.get(CONF_LATITUDE, hass.config.latitude), entry.data.get(CONF_LONGITUDE, hass.config.longitude), ) for entry in hass.config_entries.async_entries(DOMAIN) ) @config_entries.HANDLERS.register(DOMAIN) class OpenUvFlowHandler(config_entries.ConfigFlow): """Handle an OpenUV config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Initialize the config flow.""" pass async def _show_form(self, errors=None): """Show the form to the user.""" data_schema = vol.Schema( { vol.Required(CONF_API_KEY): str, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_ELEVATION): vol.Coerce(float), } ) return self.async_show_form( step_id="user", data_schema=data_schema, errors=errors if errors else {} ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" from pyopenuv import Client from pyopenuv.errors import OpenUvError if not user_input: return await self._show_form() identifier = "{0}, {1}".format( user_input.get(CONF_LATITUDE, self.hass.config.latitude), user_input.get(CONF_LONGITUDE, self.hass.config.longitude), ) if identifier in configured_instances(self.hass): return await self._show_form({CONF_LATITUDE: "identifier_exists"}) websession = aiohttp_client.async_get_clientsession(self.hass) client = 
Client(user_input[CONF_API_KEY], 0, 0, websession) try: await client.uv_index() except OpenUvError: return await self._show_form({CONF_API_KEY: "invalid_api_key"}) return self.async_create_entry(title=identifier, data=user_input)
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/openuv/config_flow.py
"""Support for MQTT room presence detection.""" import logging import json from datetime import timedelta import voluptuous as vol from homeassistant.components import mqtt import homeassistant.helpers.config_validation as cv from homeassistant.components.mqtt import CONF_STATE_TOPIC from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, CONF_TIMEOUT, STATE_NOT_HOME, ATTR_ID from homeassistant.core import callback from homeassistant.helpers.entity import Entity from homeassistant.util import dt, slugify _LOGGER = logging.getLogger(__name__) ATTR_DEVICE_ID = "device_id" ATTR_DISTANCE = "distance" ATTR_ROOM = "room" CONF_DEVICE_ID = "device_id" CONF_AWAY_TIMEOUT = "away_timeout" DEFAULT_AWAY_TIMEOUT = 0 DEFAULT_NAME = "Room Sensor" DEFAULT_TIMEOUT = 5 DEFAULT_TOPIC = "room_presence" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICE_ID): cv.string, vol.Required(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema) MQTT_PAYLOAD = vol.Schema( vol.All( json.loads, vol.Schema( { vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_DISTANCE): vol.Coerce(float), }, extra=vol.ALLOW_EXTRA, ), ) ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up MQTT room Sensor.""" async_add_entities( [ MQTTRoomSensor( config.get(CONF_NAME), config.get(CONF_STATE_TOPIC), config.get(CONF_DEVICE_ID), config.get(CONF_TIMEOUT), config.get(CONF_AWAY_TIMEOUT), ) ] ) class MQTTRoomSensor(Entity): """Representation of a room sensor that is updated via MQTT.""" def __init__(self, name, state_topic, device_id, timeout, consider_home): """Initialize the sensor.""" self._state = STATE_NOT_HOME self._name = name self._state_topic = "{}{}".format(state_topic, "/+") self._device_id = slugify(device_id).upper() 
self._timeout = timeout self._consider_home = ( timedelta(seconds=consider_home) if consider_home else None ) self._distance = None self._updated = None async def async_added_to_hass(self): """Subscribe to MQTT events.""" @callback def update_state(device_id, room, distance): """Update the sensor state.""" self._state = room self._distance = distance self._updated = dt.utcnow() self.async_schedule_update_ha_state() @callback def message_received(msg): """Handle new MQTT messages.""" try: data = MQTT_PAYLOAD(msg.payload) except vol.MultipleInvalid as error: _LOGGER.debug("Skipping update because of malformatted data: %s", error) return device = _parse_update_data(msg.topic, data) if device.get(CONF_DEVICE_ID) == self._device_id: if self._distance is None or self._updated is None: update_state(**device) else: # update if: # device is in the same room OR # device is closer to another room OR # last update from other room was too long ago timediff = dt.utcnow() - self._updated if ( device.get(ATTR_ROOM) == self._state or device.get(ATTR_DISTANCE) < self._distance or timediff.seconds >= self._timeout ): update_state(**device) return await mqtt.async_subscribe( self.hass, self._state_topic, message_received, 1 ) @property def name(self): """Return the name of the sensor.""" return self._name @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_DISTANCE: self._distance} @property def state(self): """Return the current room of the entity.""" return self._state def update(self): """Update the state for absent devices.""" if ( self._updated and self._consider_home and dt.utcnow() - self._updated > self._consider_home ): self._state = STATE_NOT_HOME def _parse_update_data(topic, data): """Parse the room presence update.""" parts = topic.split("/") room = parts[-1] device_id = slugify(data.get(ATTR_ID)).upper() distance = data.get("distance") parsed_data = {ATTR_DEVICE_ID: device_id, ATTR_ROOM: room, ATTR_DISTANCE: distance} return 
parsed_data
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/mqtt_room/sensor.py
"""Support for functionality to have conversations with Home Assistant.""" import logging import re import voluptuous as vol from homeassistant import core from homeassistant.components import http from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import EVENT_COMPONENT_LOADED from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, intent from homeassistant.loader import bind_hass from homeassistant.setup import ATTR_COMPONENT from .util import create_matcher _LOGGER = logging.getLogger(__name__) ATTR_TEXT = "text" DOMAIN = "conversation" REGEX_TURN_COMMAND = re.compile(r"turn (?P<name>(?: |\w)+) (?P<command>\w+)") REGEX_TYPE = type(re.compile("")) UTTERANCES = { "cover": { INTENT_OPEN_COVER: ["Open [the] [a] [an] {name}[s]"], INTENT_CLOSE_COVER: ["Close [the] [a] [an] {name}[s]"], } } SERVICE_PROCESS = "process" SERVICE_PROCESS_SCHEMA = vol.Schema({vol.Required(ATTR_TEXT): cv.string}) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional("intents"): vol.Schema( {cv.string: vol.All(cv.ensure_list, [cv.string])} ) } ) }, extra=vol.ALLOW_EXTRA, ) @core.callback @bind_hass def async_register(hass, intent_type, utterances): """Register utterances and any custom intents. Registrations don't require conversations to be loaded. They will become active once the conversation component is loaded. 
""" intents = hass.data.get(DOMAIN) if intents is None: intents = hass.data[DOMAIN] = {} conf = intents.get(intent_type) if conf is None: conf = intents[intent_type] = [] for utterance in utterances: if isinstance(utterance, REGEX_TYPE): conf.append(utterance) else: conf.append(create_matcher(utterance)) async def async_setup(hass, config): """Register the process service.""" config = config.get(DOMAIN, {}) intents = hass.data.get(DOMAIN) if intents is None: intents = hass.data[DOMAIN] = {} for intent_type, utterances in config.get("intents", {}).items(): conf = intents.get(intent_type) if conf is None: conf = intents[intent_type] = [] conf.extend(create_matcher(utterance) for utterance in utterances) async def process(service): """Parse text into commands.""" text = service.data[ATTR_TEXT] _LOGGER.debug("Processing: <%s>", text) try: await _process(hass, text) except intent.IntentHandleError as err: _LOGGER.error("Error processing %s: %s", text, err) hass.services.async_register( DOMAIN, SERVICE_PROCESS, process, schema=SERVICE_PROCESS_SCHEMA ) hass.http.register_view(ConversationProcessView) # We strip trailing 's' from name because our state matcher will fail # if a letter is not there. By removing 's' we can match singular and # plural names. 
async_register( hass, intent.INTENT_TURN_ON, ["Turn [the] [a] {name}[s] on", "Turn on [the] [a] [an] {name}[s]"], ) async_register( hass, intent.INTENT_TURN_OFF, ["Turn [the] [a] [an] {name}[s] off", "Turn off [the] [a] [an] {name}[s]"], ) async_register( hass, intent.INTENT_TOGGLE, ["Toggle [the] [a] [an] {name}[s]", "[the] [a] [an] {name}[s] toggle"], ) @callback def register_utterances(component): """Register utterances for a component.""" if component not in UTTERANCES: return for intent_type, sentences in UTTERANCES[component].items(): async_register(hass, intent_type, sentences) @callback def component_loaded(event): """Handle a new component loaded.""" register_utterances(event.data[ATTR_COMPONENT]) hass.bus.async_listen(EVENT_COMPONENT_LOADED, component_loaded) # Check already loaded components. for component in hass.config.components: register_utterances(component) return True async def _process(hass, text): """Process a line of text.""" intents = hass.data.get(DOMAIN, {}) for intent_type, matchers in intents.items(): for matcher in matchers: match = matcher.match(text) if not match: continue response = await hass.helpers.intent.async_handle( DOMAIN, intent_type, {key: {"value": value} for key, value in match.groupdict().items()}, text, ) return response class ConversationProcessView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = "/api/conversation/process" name = "api:conversation:process" @RequestDataValidator(vol.Schema({vol.Required("text"): str})) async def post(self, request, data): """Send a request for processing.""" hass = request.app["hass"] try: intent_result = await _process(hass, data["text"]) except intent.IntentHandleError as err: intent_result = intent.IntentResponse() intent_result.async_set_speech(str(err)) if intent_result is None: intent_result = intent.IntentResponse() intent_result.async_set_speech("Sorry, I didn't understand that") return self.json(intent_result)
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/conversation/__init__.py
"""Support for Generic Modbus Thermostats.""" import logging import struct import voluptuous as vol from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateDevice from homeassistant.components.climate.const import ( SUPPORT_TARGET_TEMPERATURE, HVAC_MODE_HEAT, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_NAME, CONF_SLAVE import homeassistant.helpers.config_validation as cv from . import CONF_HUB, DEFAULT_HUB, DOMAIN as MODBUS_DOMAIN _LOGGER = logging.getLogger(__name__) CONF_TARGET_TEMP = "target_temp_register" CONF_CURRENT_TEMP = "current_temp_register" CONF_DATA_TYPE = "data_type" CONF_COUNT = "data_count" CONF_PRECISION = "precision" DATA_TYPE_INT = "int" DATA_TYPE_UINT = "uint" DATA_TYPE_FLOAT = "float" SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE HVAC_MODES = [HVAC_MODE_HEAT] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_CURRENT_TEMP): cv.positive_int, vol.Required(CONF_NAME): cv.string, vol.Required(CONF_SLAVE): cv.positive_int, vol.Required(CONF_TARGET_TEMP): cv.positive_int, vol.Optional(CONF_COUNT, default=2): cv.positive_int, vol.Optional(CONF_DATA_TYPE, default=DATA_TYPE_FLOAT): vol.In( [DATA_TYPE_INT, DATA_TYPE_UINT, DATA_TYPE_FLOAT] ), vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string, vol.Optional(CONF_PRECISION, default=1): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Modbus Thermostat Platform.""" name = config.get(CONF_NAME) modbus_slave = config.get(CONF_SLAVE) target_temp_register = config.get(CONF_TARGET_TEMP) current_temp_register = config.get(CONF_CURRENT_TEMP) data_type = config.get(CONF_DATA_TYPE) count = config.get(CONF_COUNT) precision = config.get(CONF_PRECISION) hub_name = config.get(CONF_HUB) hub = hass.data[MODBUS_DOMAIN][hub_name] add_entities( [ ModbusThermostat( hub, name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision, ) ], True, ) class ModbusThermostat(ClimateDevice): """Representation of a 
Modbus Thermostat.""" def __init__( self, hub, name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision, ): """Initialize the unit.""" self._hub = hub self._name = name self._slave = modbus_slave self._target_temperature_register = target_temp_register self._current_temperature_register = current_temp_register self._target_temperature = None self._current_temperature = None self._data_type = data_type self._count = int(count) self._precision = precision self._structure = ">f" data_types = { DATA_TYPE_INT: {1: "h", 2: "i", 4: "q"}, DATA_TYPE_UINT: {1: "H", 2: "I", 4: "Q"}, DATA_TYPE_FLOAT: {1: "e", 2: "f", 4: "d"}, } self._structure = ">{}".format(data_types[self._data_type][self._count]) @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS def update(self): """Update Target & Current Temperature.""" self._target_temperature = self.read_register(self._target_temperature_register) self._current_temperature = self.read_register( self._current_temperature_register ) @property def hvac_mode(self): """Return the current HVAC mode.""" return HVAC_MODE_HEAT @property def hvac_modes(self): """Return the possible HVAC modes.""" return HVAC_MODES @property def name(self): """Return the name of the climate device.""" return self._name @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the target temperature.""" return self._target_temperature def set_temperature(self, **kwargs): """Set new target temperature.""" target_temperature = kwargs.get(ATTR_TEMPERATURE) if target_temperature is None: return byte_string = struct.pack(self._structure, target_temperature) register_value = struct.unpack(">h", byte_string[0:2])[0] try: self.write_register(self._target_temperature_register, register_value) except AttributeError as ex: _LOGGER.error(ex) def read_register(self, register): """Read 
holding register using the Modbus hub slave.""" try: result = self._hub.read_holding_registers( self._slave, register, self._count ) except AttributeError as ex: _LOGGER.error(ex) byte_string = b"".join( [x.to_bytes(2, byteorder="big") for x in result.registers] ) val = struct.unpack(self._structure, byte_string)[0] register_value = format(val, f".{self._precision}f") return register_value def write_register(self, register, value): """Write register using the Modbus hub slave.""" self._hub.write_registers(self._slave, register, [value, 0])
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/modbus/climate.py
"""Offer time listening automation rules.""" import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.const import CONF_AT, CONF_PLATFORM from homeassistant.helpers import config_validation as cv from homeassistant.helpers.event import async_track_time_change # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) TRIGGER_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): "time", vol.Required(CONF_AT): cv.time} ) async def async_attach_trigger(hass, config, action, automation_info): """Listen for state changes based on configuration.""" at_time = config.get(CONF_AT) hours, minutes, seconds = at_time.hour, at_time.minute, at_time.second @callback def time_automation_listener(now): """Listen for time changes and calls action.""" hass.async_run_job(action, {"trigger": {"platform": "time", "now": now}}) return async_track_time_change( hass, time_automation_listener, hour=hours, minute=minutes, second=seconds )
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/automation/time.py
"""Support for interacting with and controlling the cmus music player.""" import logging import voluptuous as vol from homeassistant.components.media_player import MediaPlayerDevice, PLATFORM_SCHEMA from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_OFF, STATE_PAUSED, STATE_PLAYING, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "cmus" DEFAULT_PORT = 3000 SUPPORT_CMUS = ( SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_TURN_OFF | SUPPORT_TURN_ON | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_SEEK | SUPPORT_PLAY ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Inclusive(CONF_HOST, "remote"): cv.string, vol.Inclusive(CONF_PASSWORD, "remote"): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discover_info=None): """Set up the CMUS platform.""" from pycmus import exceptions host = config.get(CONF_HOST) password = config.get(CONF_PASSWORD) port = config.get(CONF_PORT) name = config.get(CONF_NAME) try: cmus_remote = CmusDevice(host, password, port, name) except exceptions.InvalidPassword: _LOGGER.error("The provided password was rejected by cmus") return False add_entities([cmus_remote], True) class CmusDevice(MediaPlayerDevice): """Representation of a running cmus.""" # pylint: disable=no-member def __init__(self, server, password, port, name): """Initialize the CMUS device.""" from pycmus import remote if server: self.cmus = remote.PyCmus(server=server, password=password, port=port) auto_name = f"cmus-{server}" else: self.cmus = remote.PyCmus() auto_name = 
"cmus-local" self._name = name or auto_name self.status = {} def update(self): """Get the latest data and update the state.""" status = self.cmus.get_status_dict() if not status: _LOGGER.warning("Received no status from cmus") else: self.status = status @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the media state.""" if self.status.get("status") == "playing": return STATE_PLAYING if self.status.get("status") == "paused": return STATE_PAUSED return STATE_OFF @property def media_content_id(self): """Content ID of current playing media.""" return self.status.get("file") @property def content_type(self): """Content type of the current playing media.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.status.get("duration") @property def media_title(self): """Title of current playing media.""" return self.status["tag"].get("title") @property def media_artist(self): """Artist of current playing media, music track only.""" return self.status["tag"].get("artist") @property def media_track(self): """Track number of current playing media, music track only.""" return self.status["tag"].get("tracknumber") @property def media_album_name(self): """Album name of current playing media, music track only.""" return self.status["tag"].get("album") @property def media_album_artist(self): """Album artist of current playing media, music track only.""" return self.status["tag"].get("albumartist") @property def volume_level(self): """Return the volume level.""" left = self.status["set"].get("vol_left")[0] right = self.status["set"].get("vol_right")[0] if left != right: volume = float(left + right) / 2 else: volume = left return int(volume) / 100 @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_CMUS def turn_off(self): """Service to send the CMUS the command to stop playing.""" 
self.cmus.player_stop() def turn_on(self): """Service to send the CMUS the command to start playing.""" self.cmus.player_play() def set_volume_level(self, volume): """Set volume level, range 0..1.""" self.cmus.set_volume(int(volume * 100)) def volume_up(self): """Set the volume up.""" left = self.status["set"].get("vol_left") right = self.status["set"].get("vol_right") if left != right: current_volume = float(left + right) / 2 else: current_volume = left if current_volume <= 100: self.cmus.set_volume(int(current_volume) + 5) def volume_down(self): """Set the volume down.""" left = self.status["set"].get("vol_left") right = self.status["set"].get("vol_right") if left != right: current_volume = float(left + right) / 2 else: current_volume = left if current_volume <= 100: self.cmus.set_volume(int(current_volume) - 5) def play_media(self, media_type, media_id, **kwargs): """Send the play command.""" if media_type in [MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST]: self.cmus.player_play_file(media_id) else: _LOGGER.error( "Invalid media type %s. Only %s and %s are supported", media_type, MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST, ) def media_pause(self): """Send the pause command.""" self.cmus.player_pause() def media_next_track(self): """Send next track command.""" self.cmus.player_next() def media_previous_track(self): """Send next track command.""" self.cmus.player_prev() def media_seek(self, position): """Send seek command.""" self.cmus.seek(position) def media_play(self): """Send the play command.""" self.cmus.player_play() def media_stop(self): """Send the stop command.""" self.cmus.stop()
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/cmus/media_player.py
"""Support for Rheem EcoNet water heaters.""" import datetime import logging import voluptuous as vol from homeassistant.components.water_heater import ( DOMAIN, PLATFORM_SCHEMA, STATE_ECO, STATE_ELECTRIC, STATE_GAS, STATE_HEAT_PUMP, STATE_HIGH_DEMAND, STATE_OFF, STATE_PERFORMANCE, SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE, WaterHeaterDevice, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_PASSWORD, CONF_USERNAME, TEMP_FAHRENHEIT, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_VACATION_START = "next_vacation_start_date" ATTR_VACATION_END = "next_vacation_end_date" ATTR_ON_VACATION = "on_vacation" ATTR_TODAYS_ENERGY_USAGE = "todays_energy_usage" ATTR_IN_USE = "in_use" ATTR_START_DATE = "start_date" ATTR_END_DATE = "end_date" SUPPORT_FLAGS_HEATER = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE SERVICE_ADD_VACATION = "econet_add_vacation" SERVICE_DELETE_VACATION = "econet_delete_vacation" ADD_VACATION_SCHEMA = vol.Schema( { vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(ATTR_START_DATE): cv.positive_int, vol.Required(ATTR_END_DATE): cv.positive_int, } ) DELETE_VACATION_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) ECONET_DATA = "econet" ECONET_STATE_TO_HA = { "Energy Saver": STATE_ECO, "gas": STATE_GAS, "High Demand": STATE_HIGH_DEMAND, "Off": STATE_OFF, "Performance": STATE_PERFORMANCE, "Heat Pump Only": STATE_HEAT_PUMP, "Electric-Only": STATE_ELECTRIC, "Electric": STATE_ELECTRIC, "Heat Pump": STATE_HEAT_PUMP, } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the EcoNet water heaters.""" from pyeconet.api import PyEcoNet hass.data[ECONET_DATA] = {} hass.data[ECONET_DATA]["water_heaters"] = [] username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) econet = 
PyEcoNet(username, password) water_heaters = econet.get_water_heaters() hass_water_heaters = [ EcoNetWaterHeater(water_heater) for water_heater in water_heaters ] add_entities(hass_water_heaters) hass.data[ECONET_DATA]["water_heaters"].extend(hass_water_heaters) def service_handle(service): """Handle the service calls.""" entity_ids = service.data.get("entity_id") all_heaters = hass.data[ECONET_DATA]["water_heaters"] _heaters = [ x for x in all_heaters if not entity_ids or x.entity_id in entity_ids ] for _water_heater in _heaters: if service.service == SERVICE_ADD_VACATION: start = service.data.get(ATTR_START_DATE) end = service.data.get(ATTR_END_DATE) _water_heater.add_vacation(start, end) if service.service == SERVICE_DELETE_VACATION: for vacation in _water_heater.water_heater.vacations: vacation.delete() _water_heater.schedule_update_ha_state(True) hass.services.register( DOMAIN, SERVICE_ADD_VACATION, service_handle, schema=ADD_VACATION_SCHEMA ) hass.services.register( DOMAIN, SERVICE_DELETE_VACATION, service_handle, schema=DELETE_VACATION_SCHEMA ) class EcoNetWaterHeater(WaterHeaterDevice): """Representation of an EcoNet water heater.""" def __init__(self, water_heater): """Initialize the water heater.""" self.water_heater = water_heater self.supported_modes = self.water_heater.supported_modes self.econet_state_to_ha = {} self.ha_state_to_econet = {} for mode in ECONET_STATE_TO_HA: if mode in self.supported_modes: self.econet_state_to_ha[mode] = ECONET_STATE_TO_HA.get(mode) for key, value in self.econet_state_to_ha.items(): self.ha_state_to_econet[value] = key for mode in self.supported_modes: if mode not in ECONET_STATE_TO_HA: error = ( "Invalid operation mode mapping. " + mode + " doesn't map. Please report this." 
) _LOGGER.error(error) @property def name(self): """Return the device name.""" return self.water_heater.name @property def available(self): """Return if the the device is online or not.""" return self.water_heater.is_connected @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_FAHRENHEIT @property def device_state_attributes(self): """Return the optional device state attributes.""" data = {} vacations = self.water_heater.get_vacations() if vacations: data[ATTR_VACATION_START] = vacations[0].start_date data[ATTR_VACATION_END] = vacations[0].end_date data[ATTR_ON_VACATION] = self.water_heater.is_on_vacation todays_usage = self.water_heater.total_usage_for_today if todays_usage: data[ATTR_TODAYS_ENERGY_USAGE] = todays_usage data[ATTR_IN_USE] = self.water_heater.in_use return data @property def current_operation(self): """ Return current operation as one of the following. ["eco", "heat_pump", "high_demand", "electric_only"] """ current_op = self.econet_state_to_ha.get(self.water_heater.mode) return current_op @property def operation_list(self): """List of available operation modes.""" op_list = [] for mode in self.supported_modes: ha_mode = self.econet_state_to_ha.get(mode) if ha_mode is not None: op_list.append(ha_mode) return op_list @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS_HEATER def set_temperature(self, **kwargs): """Set new target temperature.""" target_temp = kwargs.get(ATTR_TEMPERATURE) if target_temp is not None: self.water_heater.set_target_set_point(target_temp) else: _LOGGER.error("A target temperature must be provided") def set_operation_mode(self, operation_mode): """Set operation mode.""" op_mode_to_set = self.ha_state_to_econet.get(operation_mode) if op_mode_to_set is not None: self.water_heater.set_mode(op_mode_to_set) else: _LOGGER.error("An operation mode must be provided") def add_vacation(self, start, end): """Add a vacation to this water 
heater.""" if not start: start = datetime.datetime.now() else: start = datetime.datetime.fromtimestamp(start) end = datetime.datetime.fromtimestamp(end) self.water_heater.set_vacation_mode(start, end) def update(self): """Get the latest date.""" self.water_heater.update_state() @property def target_temperature(self): """Return the temperature we try to reach.""" return self.water_heater.set_point @property def min_temp(self): """Return the minimum temperature.""" return self.water_heater.min_set_point @property def max_temp(self): """Return the maximum temperature.""" return self.water_heater.max_set_point
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/econet/water_heater.py
"""Support for AlarmDecoder devices.""" import logging from datetime import timedelta import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.const import EVENT_HOMEASSISTANT_STOP, CONF_HOST from homeassistant.helpers.discovery import load_platform from homeassistant.util import dt as dt_util from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA _LOGGER = logging.getLogger(__name__) DOMAIN = "alarmdecoder" DATA_AD = "alarmdecoder" CONF_DEVICE = "device" CONF_DEVICE_BAUD = "baudrate" CONF_DEVICE_PATH = "path" CONF_DEVICE_PORT = "port" CONF_DEVICE_TYPE = "type" CONF_PANEL_DISPLAY = "panel_display" CONF_ZONE_NAME = "name" CONF_ZONE_TYPE = "type" CONF_ZONE_LOOP = "loop" CONF_ZONE_RFID = "rfid" CONF_ZONES = "zones" CONF_RELAY_ADDR = "relayaddr" CONF_RELAY_CHAN = "relaychan" DEFAULT_DEVICE_TYPE = "socket" DEFAULT_DEVICE_HOST = "localhost" DEFAULT_DEVICE_PORT = 10000 DEFAULT_DEVICE_PATH = "/dev/ttyUSB0" DEFAULT_DEVICE_BAUD = 115200 DEFAULT_PANEL_DISPLAY = False DEFAULT_ZONE_TYPE = "opening" SIGNAL_PANEL_MESSAGE = "alarmdecoder.panel_message" SIGNAL_PANEL_ARM_AWAY = "alarmdecoder.panel_arm_away" SIGNAL_PANEL_ARM_HOME = "alarmdecoder.panel_arm_home" SIGNAL_PANEL_DISARM = "alarmdecoder.panel_disarm" SIGNAL_ZONE_FAULT = "alarmdecoder.zone_fault" SIGNAL_ZONE_RESTORE = "alarmdecoder.zone_restore" SIGNAL_RFX_MESSAGE = "alarmdecoder.rfx_message" SIGNAL_REL_MESSAGE = "alarmdecoder.rel_message" DEVICE_SOCKET_SCHEMA = vol.Schema( { vol.Required(CONF_DEVICE_TYPE): "socket", vol.Optional(CONF_HOST, default=DEFAULT_DEVICE_HOST): cv.string, vol.Optional(CONF_DEVICE_PORT, default=DEFAULT_DEVICE_PORT): cv.port, } ) DEVICE_SERIAL_SCHEMA = vol.Schema( { vol.Required(CONF_DEVICE_TYPE): "serial", vol.Optional(CONF_DEVICE_PATH, default=DEFAULT_DEVICE_PATH): cv.string, vol.Optional(CONF_DEVICE_BAUD, default=DEFAULT_DEVICE_BAUD): cv.string, } ) DEVICE_USB_SCHEMA = vol.Schema({vol.Required(CONF_DEVICE_TYPE): "usb"}) ZONE_SCHEMA = 
vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): vol.Any( DEVICE_CLASSES_SCHEMA ), vol.Optional(CONF_ZONE_RFID): cv.string, vol.Optional(CONF_ZONE_LOOP): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), vol.Inclusive( CONF_RELAY_ADDR, "relaylocation", "Relay address and channel must exist together", ): cv.byte, vol.Inclusive( CONF_RELAY_CHAN, "relaylocation", "Relay address and channel must exist together", ): cv.byte, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_DEVICE): vol.Any( DEVICE_SOCKET_SCHEMA, DEVICE_SERIAL_SCHEMA, DEVICE_USB_SCHEMA ), vol.Optional( CONF_PANEL_DISPLAY, default=DEFAULT_PANEL_DISPLAY ): cv.boolean, vol.Optional(CONF_ZONES): {vol.Coerce(int): ZONE_SCHEMA}, } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up for the AlarmDecoder devices.""" from alarmdecoder import AlarmDecoder from alarmdecoder.devices import SocketDevice, SerialDevice, USBDevice conf = config.get(DOMAIN) restart = False device = conf.get(CONF_DEVICE) display = conf.get(CONF_PANEL_DISPLAY) zones = conf.get(CONF_ZONES) device_type = device.get(CONF_DEVICE_TYPE) host = DEFAULT_DEVICE_HOST port = DEFAULT_DEVICE_PORT path = DEFAULT_DEVICE_PATH baud = DEFAULT_DEVICE_BAUD def stop_alarmdecoder(event): """Handle the shutdown of AlarmDecoder.""" _LOGGER.debug("Shutting down alarmdecoder") nonlocal restart restart = False controller.close() def open_connection(now=None): """Open a connection to AlarmDecoder.""" from alarmdecoder.util import NoDeviceError nonlocal restart try: controller.open(baud) except NoDeviceError: _LOGGER.debug("Failed to connect. 
Retrying in 5 seconds") hass.helpers.event.track_point_in_time( open_connection, dt_util.utcnow() + timedelta(seconds=5) ) return _LOGGER.debug("Established a connection with the alarmdecoder") restart = True def handle_closed_connection(event): """Restart after unexpected loss of connection.""" nonlocal restart if not restart: return restart = False _LOGGER.warning("AlarmDecoder unexpectedly lost connection.") hass.add_job(open_connection) def handle_message(sender, message): """Handle message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message) def handle_rfx_message(sender, message): """Handle RFX message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message) def zone_fault_callback(sender, zone): """Handle zone fault from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone) def zone_restore_callback(sender, zone): """Handle zone restore from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone) def handle_rel_message(sender, message): """Handle relay message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message) controller = False if device_type == "socket": host = device.get(CONF_HOST) port = device.get(CONF_DEVICE_PORT) controller = AlarmDecoder(SocketDevice(interface=(host, port))) elif device_type == "serial": path = device.get(CONF_DEVICE_PATH) baud = device.get(CONF_DEVICE_BAUD) controller = AlarmDecoder(SerialDevice(interface=path)) elif device_type == "usb": AlarmDecoder(USBDevice.find()) return False controller.on_message += handle_message controller.on_rfx_message += handle_rfx_message controller.on_zone_fault += zone_fault_callback controller.on_zone_restore += zone_restore_callback controller.on_close += handle_closed_connection controller.on_relay_changed += handle_rel_message hass.data[DATA_AD] = controller open_connection() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, 
stop_alarmdecoder) load_platform(hass, "alarm_control_panel", DOMAIN, conf, config) if zones: load_platform(hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones}, config) if display: load_platform(hass, "sensor", DOMAIN, conf, config) return True
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/alarmdecoder/__init__.py
"""Simplepush notification service.""" import logging import voluptuous as vol from homeassistant.const import CONF_PASSWORD import homeassistant.helpers.config_validation as cv from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) ATTR_ENCRYPTED = "encrypted" CONF_DEVICE_KEY = "device_key" CONF_EVENT = "event" CONF_SALT = "salt" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICE_KEY): cv.string, vol.Optional(CONF_EVENT): cv.string, vol.Inclusive(CONF_PASSWORD, ATTR_ENCRYPTED): cv.string, vol.Inclusive(CONF_SALT, ATTR_ENCRYPTED): cv.string, } ) def get_service(hass, config, discovery_info=None): """Get the Simplepush notification service.""" return SimplePushNotificationService(config) class SimplePushNotificationService(BaseNotificationService): """Implementation of the notification service for Simplepush.""" def __init__(self, config): """Initialize the Simplepush notification service.""" self._device_key = config.get(CONF_DEVICE_KEY) self._event = config.get(CONF_EVENT) self._password = config.get(CONF_PASSWORD) self._salt = config.get(CONF_SALT) def send_message(self, message="", **kwargs): """Send a message to a Simplepush user.""" from simplepush import send, send_encrypted title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) if self._password: send_encrypted( self._device_key, self._password, self._salt, title, message, event=self._event, ) else: send(self._device_key, title, message, event=self._event)
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/simplepush/notify.py
"""Support for the Opple light.""" import logging import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, Light, ) from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.util.color import ( color_temperature_kelvin_to_mired as kelvin_to_mired, color_temperature_mired_to_kelvin as mired_to_kelvin, ) _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "opple light" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Opple light platform.""" name = config[CONF_NAME] host = config[CONF_HOST] entity = OppleLight(name, host) add_entities([entity]) _LOGGER.debug("Init light %s %s", host, entity.unique_id) class OppleLight(Light): """Opple light device.""" def __init__(self, name, host): """Initialize an Opple light.""" from pyoppleio.OppleLightDevice import OppleLightDevice self._device = OppleLightDevice(host) self._name = name self._is_on = None self._brightness = None self._color_temp = None @property def available(self): """Return True if light is available.""" return self._device.is_online @property def unique_id(self): """Return unique ID for light.""" return self._device.mac @property def name(self): """Return the display name of this light.""" return self._name @property def is_on(self): """Return true if light is on.""" return self._is_on @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def color_temp(self): """Return the color temperature of this light.""" return kelvin_to_mired(self._color_temp) @property def min_mireds(self): """Return minimum supported color temperature.""" return 175 @property def max_mireds(self): """Return maximum supported color 
temperature.""" return 333 @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP def turn_on(self, **kwargs): """Instruct the light to turn on.""" _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs) if not self.is_on: self._device.power_on = True if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]: self._device.brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]: color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) self._device.color_temperature = color_temp def turn_off(self, **kwargs): """Instruct the light to turn off.""" self._device.power_on = False _LOGGER.debug("Turn off light %s", self._device.ip) def update(self): """Synchronize state with light.""" prev_available = self.available self._device.update() if ( prev_available == self.available and self._is_on == self._device.power_on and self._brightness == self._device.brightness and self._color_temp == self._device.color_temperature ): return if not self.available: _LOGGER.debug("Light %s is offline", self._device.ip) return self._is_on = self._device.power_on self._brightness = self._device.brightness self._color_temp = self._device.color_temperature if not self.is_on: _LOGGER.debug("Update light %s success: power off", self._device.ip) else: _LOGGER.debug( "Update light %s success: power on brightness %s " "color temperature %s", self._device.ip, self._brightness, self._color_temp, )
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/opple/light.py
"""Support for sending Wake-On-LAN magic packets.""" from functools import partial import logging import voluptuous as vol from homeassistant.const import CONF_MAC import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "wake_on_lan" CONF_BROADCAST_ADDRESS = "broadcast_address" SERVICE_SEND_MAGIC_PACKET = "send_magic_packet" WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA = vol.Schema( {vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string} ) async def async_setup(hass, config): """Set up the wake on LAN component.""" import wakeonlan async def send_magic_packet(call): """Send magic packet to wake up a device.""" mac_address = call.data.get(CONF_MAC) broadcast_address = call.data.get(CONF_BROADCAST_ADDRESS) _LOGGER.info( "Send magic packet to mac %s (broadcast: %s)", mac_address, broadcast_address, ) if broadcast_address is not None: await hass.async_add_job( partial( wakeonlan.send_magic_packet, mac_address, ip_address=broadcast_address, ) ) else: await hass.async_add_job(partial(wakeonlan.send_magic_packet, mac_address)) hass.services.async_register( DOMAIN, SERVICE_SEND_MAGIC_PACKET, send_magic_packet, schema=WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA, ) return True
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/wake_on_lan/__init__.py
"""Support for XS1 switches.""" import logging from xs1_api_client.api_constants import ActuatorType from homeassistant.helpers.entity import ToggleEntity from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, XS1DeviceEntity _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the XS1 switch platform.""" actuators = hass.data[COMPONENT_DOMAIN][ACTUATORS] switch_entities = [] for actuator in actuators: if (actuator.type() == ActuatorType.SWITCH) or ( actuator.type() == ActuatorType.DIMMER ): switch_entities.append(XS1SwitchEntity(actuator)) add_entities(switch_entities) class XS1SwitchEntity(XS1DeviceEntity, ToggleEntity): """Representation of a XS1 switch actuator.""" @property def name(self): """Return the name of the device if any.""" return self.device.name() @property def is_on(self): """Return true if switch is on.""" return self.device.value() == 100 def turn_on(self, **kwargs): """Turn the device on.""" self.device.turn_on() def turn_off(self, **kwargs): """Turn the device off.""" self.device.turn_off()
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/xs1/switch.py
"""Device tracker helpers.""" import asyncio from typing import Dict, Any, Callable, Optional from types import ModuleType import attr from homeassistant.core import callback from homeassistant.setup import async_prepare_setup_platform from homeassistant.helpers import config_per_platform from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType, HomeAssistantType from homeassistant.helpers.event import async_track_time_interval from homeassistant.util import dt as dt_util from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE from .const import ( DOMAIN, PLATFORM_TYPE_LEGACY, CONF_SCAN_INTERVAL, SCAN_INTERVAL, SOURCE_TYPE_ROUTER, LOGGER, ) @attr.s class DeviceTrackerPlatform: """Class to hold platform information.""" LEGACY_SETUP = ( "async_get_scanner", "get_scanner", "async_setup_scanner", "setup_scanner", ) name = attr.ib(type=str) platform = attr.ib(type=ModuleType) config = attr.ib(type=Dict) @property def type(self): """Return platform type.""" for methods, platform_type in ((self.LEGACY_SETUP, PLATFORM_TYPE_LEGACY),): for meth in methods: if hasattr(self.platform, meth): return platform_type return None async def async_setup_legacy(self, hass, tracker, discovery_info=None): """Set up a legacy platform.""" LOGGER.info("Setting up %s.%s", DOMAIN, self.type) try: scanner = None setup = None if hasattr(self.platform, "async_get_scanner"): scanner = await self.platform.async_get_scanner( hass, {DOMAIN: self.config} ) elif hasattr(self.platform, "get_scanner"): scanner = await hass.async_add_job( self.platform.get_scanner, hass, {DOMAIN: self.config} ) elif hasattr(self.platform, "async_setup_scanner"): setup = await self.platform.async_setup_scanner( hass, self.config, tracker.async_see, discovery_info ) elif hasattr(self.platform, "setup_scanner"): setup = await hass.async_add_job( self.platform.setup_scanner, hass, self.config, tracker.see, discovery_info, ) else: raise HomeAssistantError("Invalid legacy 
device_tracker platform.") if scanner: async_setup_scanner_platform( hass, self.config, scanner, tracker.async_see, self.type ) return if not setup: LOGGER.error("Error setting up platform %s", self.type) return except Exception: # pylint: disable=broad-except LOGGER.exception("Error setting up platform %s", self.type) async def async_extract_config(hass, config): """Extract device tracker config and split between legacy and modern.""" legacy = [] for platform in await asyncio.gather( *( async_create_platform_type(hass, config, p_type, p_config) for p_type, p_config in config_per_platform(config, DOMAIN) ) ): if platform is None: continue if platform.type == PLATFORM_TYPE_LEGACY: legacy.append(platform) else: raise ValueError( "Unable to determine type for {}: {}".format( platform.name, platform.type ) ) return legacy async def async_create_platform_type( hass, config, p_type, p_config ) -> Optional[DeviceTrackerPlatform]: """Determine type of platform.""" platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type) if platform is None: return None return DeviceTrackerPlatform(p_type, platform, p_config) @callback def async_setup_scanner_platform( hass: HomeAssistantType, config: ConfigType, scanner: Any, async_see_device: Callable, platform: str, ): """Set up the connect scanner-based platform to device tracker. This method must be run in the event loop. 
""" interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) update_lock = asyncio.Lock() scanner.hass = hass # Initial scan of each mac we also tell about host name for config seen: Any = set() async def async_device_tracker_scan(now: dt_util.dt.datetime): """Handle interval matches.""" if update_lock.locked(): LOGGER.warning( "Updating device list from %s took longer than the scheduled " "scan interval %s", platform, interval, ) return async with update_lock: found_devices = await scanner.async_scan_devices() for mac in found_devices: if mac in seen: host_name = None else: host_name = await scanner.async_get_device_name(mac) seen.add(mac) try: extra_attributes = await scanner.async_get_extra_attributes(mac) except NotImplementedError: extra_attributes = dict() kwargs = { "mac": mac, "host_name": host_name, "source_type": SOURCE_TYPE_ROUTER, "attributes": { "scanner": scanner.__class__.__name__, **extra_attributes, }, } zone_home = hass.states.get(hass.components.zone.ENTITY_ID_HOME) if zone_home: kwargs["gps"] = [ zone_home.attributes[ATTR_LATITUDE], zone_home.attributes[ATTR_LONGITUDE], ] kwargs["gps_accuracy"] = 0 hass.async_create_task(async_see_device(**kwargs)) async_track_time_interval(hass, async_device_tracker_scan, interval) hass.async_create_task(async_device_tracker_scan(None))
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/device_tracker/setup.py
"""Constants for the IGD component.""" import logging CONF_ENABLE_PORT_MAPPING = "port_mapping" CONF_ENABLE_SENSORS = "sensors" CONF_HASS = "hass" CONF_LOCAL_IP = "local_ip" CONF_PORTS = "ports" DOMAIN = "upnp" LOGGER = logging.getLogger(__package__) SIGNAL_REMOVE_SENSOR = "upnp_remove_sensor"
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/upnp/const.py
"""Support for the ZHA platform.""" import logging import time from homeassistant.components.device_tracker import DOMAIN, SOURCE_TYPE_ROUTER from homeassistant.components.device_tracker.config_entry import ScannerEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core.const import ( CHANNEL_POWER_CONFIGURATION, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ATTR_UPDATED, ZHA_DISCOVERY_NEW, ) from .entity import ZhaEntity from .sensor import battery_percentage_remaining_formatter _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation device tracker from config entry.""" async def async_discover(discovery_info): await _async_setup_entities( hass, config_entry, async_add_entities, [discovery_info] ) unsub = async_dispatcher_connect( hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) device_trackers = hass.data.get(DATA_ZHA, {}).get(DOMAIN) if device_trackers is not None: await _async_setup_entities( hass, config_entry, async_add_entities, device_trackers.values() ) del hass.data[DATA_ZHA][DOMAIN] async def _async_setup_entities( hass, config_entry, async_add_entities, discovery_infos ): """Set up the ZHA device trackers.""" entities = [] for discovery_info in discovery_infos: entities.append(ZHADeviceScannerEntity(**discovery_info)) async_add_entities(entities, update_before_add=True) class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity): """Represent a tracked device.""" def __init__(self, **kwargs): """Initialize the ZHA device tracker.""" super().__init__(**kwargs) self._battery_channel = self.cluster_channels.get(CHANNEL_POWER_CONFIGURATION) self._connected = False self._keepalive_interval = 60 self._should_poll = True self._battery_level = None async def async_added_to_hass(self): """Run when about to be added to hass.""" await 
super().async_added_to_hass() if self._battery_channel: await self.async_accept_signal( self._battery_channel, SIGNAL_ATTR_UPDATED, self.async_battery_percentage_remaining_updated, ) async def async_update(self): """Handle polling.""" if self.zha_device.last_seen is None: self._connected = False else: difference = time.time() - self.zha_device.last_seen if difference > self._keepalive_interval: self._connected = False else: self._connected = True @property def is_connected(self): """Return true if the device is connected to the network.""" return self._connected @property def source_type(self): """Return the source type, eg gps or router, of the device.""" return SOURCE_TYPE_ROUTER @callback def async_battery_percentage_remaining_updated(self, value): """Handle tracking.""" self.debug("battery_percentage_remaining updated: %s", value) self._connected = True self._battery_level = battery_percentage_remaining_formatter(value) self.async_schedule_update_ha_state() @property def battery_level(self): """Return the battery level of the device. Percentage from 0-100. """ return self._battery_level
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/zha/device_tracker.py
"""Support for Rain Bird Irrigation system LNK WiFi Module.""" import logging from pyrainbird import AvailableStations, RainbirdController import voluptuous as vol from homeassistant.components.switch import SwitchDevice from homeassistant.const import ATTR_ENTITY_ID, CONF_FRIENDLY_NAME, CONF_TRIGGER_TIME from homeassistant.helpers import config_validation as cv from . import CONF_ZONES, DATA_RAINBIRD, DOMAIN, RAINBIRD_CONTROLLER _LOGGER = logging.getLogger(__name__) ATTR_DURATION = "duration" SERVICE_START_IRRIGATION = "start_irrigation" SERVICE_SCHEMA_IRRIGATION = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_DURATION): vol.All(vol.Coerce(float), vol.Range(min=0)), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Rain Bird switches over a Rain Bird controller.""" if discovery_info is None: return controller: RainbirdController = hass.data[DATA_RAINBIRD][ discovery_info[RAINBIRD_CONTROLLER] ] available_stations: AvailableStations = controller.get_available_stations() if not (available_stations and available_stations.stations): return devices = [] for zone in range(1, available_stations.stations.count + 1): if available_stations.stations.active(zone): zone_config = discovery_info.get(CONF_ZONES, {}).get(zone, {}) time = zone_config.get(CONF_TRIGGER_TIME, discovery_info[CONF_TRIGGER_TIME]) name = zone_config.get(CONF_FRIENDLY_NAME) devices.append( RainBirdSwitch( controller, zone, time, name if name else "Sprinkler {}".format(zone), ) ) add_entities(devices, True) def start_irrigation(service): entity_id = service.data[ATTR_ENTITY_ID] duration = service.data[ATTR_DURATION] for device in devices: if device.entity_id == entity_id: device.turn_on(duration=duration) hass.services.register( DOMAIN, SERVICE_START_IRRIGATION, start_irrigation, schema=SERVICE_SCHEMA_IRRIGATION, ) class RainBirdSwitch(SwitchDevice): """Representation of a Rain Bird switch.""" def __init__(self, controller: RainbirdController, 
zone, time, name): """Initialize a Rain Bird Switch Device.""" self._rainbird = controller self._zone = zone self._name = name self._state = None self._duration = time self._attributes = {ATTR_DURATION: self._duration, "zone": self._zone} @property def device_state_attributes(self): """Return state attributes.""" return self._attributes @property def name(self): """Get the name of the switch.""" return self._name def turn_on(self, **kwargs): """Turn the switch on.""" if self._rainbird.irrigate_zone( int(self._zone), int(kwargs[ATTR_DURATION] if ATTR_DURATION in kwargs else self._duration), ): self._state = True def turn_off(self, **kwargs): """Turn the switch off.""" if self._rainbird.stop_irrigation(): self._state = False def update(self): """Update switch status.""" self._state = self._rainbird.get_zone_state(self._zone) @property def is_on(self): """Return true if switch is on.""" return self._state
"""Test cases around the demo fan platform.""" import pytest from homeassistant.setup import async_setup_component from homeassistant.components import fan from homeassistant.const import STATE_OFF, STATE_ON from tests.components.fan import common FAN_ENTITY_ID = "fan.living_room_fan" def get_entity(hass): """Get the fan entity.""" return hass.states.get(FAN_ENTITY_ID) @pytest.fixture(autouse=True) def setup_comp(hass): """Initialize components.""" hass.loop.run_until_complete( async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}}) ) async def test_turn_on(hass): """Test turning on the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID, fan.SPEED_HIGH) assert STATE_ON == get_entity(hass).state assert fan.SPEED_HIGH == get_entity(hass).attributes[fan.ATTR_SPEED] async def test_turn_off(hass): """Test turning off the device.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass, FAN_ENTITY_ID) assert STATE_OFF == get_entity(hass).state async def test_turn_off_without_entity_id(hass): """Test turning off all fans.""" assert STATE_OFF == get_entity(hass).state await common.async_turn_on(hass, FAN_ENTITY_ID) assert STATE_OFF != get_entity(hass).state await common.async_turn_off(hass) assert STATE_OFF == get_entity(hass).state async def test_set_direction(hass): """Test setting the direction of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_direction(hass, FAN_ENTITY_ID, fan.DIRECTION_REVERSE) assert fan.DIRECTION_REVERSE == get_entity(hass).attributes.get("direction") async def test_set_speed(hass): """Test setting the speed of the device.""" assert STATE_OFF == get_entity(hass).state await common.async_set_speed(hass, FAN_ENTITY_ID, fan.SPEED_LOW) assert fan.SPEED_LOW 
== get_entity(hass).attributes.get("speed") async def test_oscillate(hass): """Test oscillating the fan.""" assert not get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, True) assert get_entity(hass).attributes.get("oscillating") await common.async_oscillate(hass, FAN_ENTITY_ID, False) assert not get_entity(hass).attributes.get("oscillating") async def test_is_on(hass): """Test is on service call.""" assert not fan.is_on(hass, FAN_ENTITY_ID) await common.async_turn_on(hass, FAN_ENTITY_ID) assert fan.is_on(hass, FAN_ENTITY_ID)
Cinntax/home-assistant
tests/components/demo/test_fan.py
homeassistant/components/rainbird/switch.py
# -*- coding: UTF-8 -*- # Authors: Thomas Hartmann <thomas.hartmann@th-ht.de> # Dirk Gütlin <dirk.guetlin@stud.sbg.ac.at> # # License: BSD (3-clause) from functools import partial import os import types import numpy as np import mne info_ignored_fields = ('file_id', 'hpi_results', 'hpi_meas', 'meas_id', 'meas_date', 'highpass', 'lowpass', 'subject_info', 'hpi_subsystem', 'experimenter', 'description', 'proj_id', 'proj_name', 'line_freq', 'gantry_angle', 'dev_head_t', 'dig', 'bads', 'projs', 'ctf_head_t', 'dev_ctf_t') ch_ignore_fields = ('logno', 'cal', 'range', 'scanno', 'coil_type', 'kind', 'loc', 'coord_frame', 'unit') info_long_fields = ('hpi_meas', ) system_to_reader_fn_dict = {'neuromag306': mne.io.read_raw_fif, 'CNT': partial(mne.io.read_raw_cnt), 'CTF': partial(mne.io.read_raw_ctf, clean_names=True), 'BTI': partial(mne.io.read_raw_bti, head_shape_fname=None, rename_channels=False, sort_by_ch_name=False), 'EGI': mne.io.read_raw_egi, 'eximia': mne.io.read_raw_eximia} ignore_channels_dict = {'BTI': ['MUz', 'MLx', 'MLy', 'MUx', 'MUy', 'MLz']} drop_extra_chans_dict = {'EGI': ['STI 014', 'DIN1', 'DIN3', 'DIN7', 'DIN4', 'DIN5', 'DIN2'], 'eximia': ['GateIn', 'Trig1', 'Trig2']} system_decimal_accuracy_dict = {'CNT': 2} pandas_not_found_warning_msg = 'The Pandas library is not installed. Not ' \ 'returning the original trialinfo matrix as ' \ 'metadata.' 
def _has_h5py(): try: import h5py # noqa return True except ImportError: return False def _remove_ignored_ch_fields(info): if 'chs' in info: for cur_ch in info['chs']: for cur_field in ch_ignore_fields: if cur_field in cur_ch: del cur_ch[cur_field] def _remove_long_info_fields(info): for cur_field in info_long_fields: if cur_field in info: del info[cur_field] def _remove_ignored_info_fields(info): for cur_field in info_ignored_fields: if cur_field in info: del info[cur_field] _remove_ignored_ch_fields(info) def get_data_paths(system): """Return common paths for all tests.""" test_data_folder_ft = os.path.join(mne.datasets.testing.data_path(), 'fieldtrip/ft_test_data', system) return test_data_folder_ft def get_cfg_local(system): """Return cfg_local field for the system.""" from mne.externals.pymatreader import read_mat cfg_local = read_mat(os.path.join(get_data_paths(system), 'raw_v7.mat'), ['cfg_local'])['cfg_local'] return cfg_local def get_raw_info(system): """Return the info dict of the raw data.""" cfg_local = get_cfg_local(system) raw_data_file = os.path.join(mne.datasets.testing.data_path(), cfg_local['file_name']) reader_function = system_to_reader_fn_dict[system] info = reader_function(raw_data_file, preload=False).info info['comps'] = [] return info def get_raw_data(system, drop_extra_chs=False): """Find, load and process the raw data.""" cfg_local = get_cfg_local(system) raw_data_file = os.path.join(mne.datasets.testing.data_path(), cfg_local['file_name']) reader_function = system_to_reader_fn_dict[system] raw_data = reader_function(raw_data_file, preload=True) crop = min(cfg_local['crop'], np.max(raw_data.times)) if system == 'eximia': crop -= 0.5 * (1.0 / raw_data.info['sfreq']) raw_data.crop(0, crop) raw_data.del_proj('all') raw_data.info['comps'] = [] raw_data.drop_channels(cfg_local['removed_chan_names']) if system in ['EGI']: raw_data._data[0:-1, :] = raw_data._data[0:-1, :] * 1e6 if system in ['CNT']: raw_data._data = raw_data._data * 1e6 if 
system in ignore_channels_dict: raw_data.drop_channels(ignore_channels_dict[system]) if system in drop_extra_chans_dict and drop_extra_chs: raw_data.drop_channels(drop_extra_chans_dict[system]) return raw_data def get_epochs(system): """Find, load and process the epoched data.""" cfg_local = get_cfg_local(system) raw_data = get_raw_data(system) if cfg_local['eventtype'] in raw_data.ch_names: stim_channel = cfg_local['eventtype'] else: stim_channel = 'STI 014' if system == 'CNT': events, event_id = mne.events_from_annotations(raw_data) events[:, 0] = events[:, 0] + 1 else: events = mne.find_events(raw_data, stim_channel=stim_channel, shortest_event=1) if isinstance(cfg_local['eventvalue'], np.ndarray): event_id = list(cfg_local['eventvalue'].astype('int')) else: event_id = [int(cfg_local['eventvalue'])] event_id = [id for id in event_id if id in events[:, 2]] epochs = mne.Epochs(raw_data, events=events, event_id=event_id, tmin=-cfg_local['prestim'], tmax=cfg_local['poststim'], baseline=None) return epochs def get_evoked(system): """Find, load and process the avg data.""" epochs = get_epochs(system) return epochs.average(picks=np.arange(len(epochs.ch_names))) def check_info_fields(expected, actual, has_raw_info, ignore_long=True): """ Check if info fields are equal. Some fields are ignored. """ expected = expected.info.copy() actual = actual.info.copy() if not has_raw_info: _remove_ignored_info_fields(expected) _remove_ignored_info_fields(actual) if info_long_fields: _remove_long_info_fields(expected) _remove_long_info_fields(actual) assert_deep_almost_equal(expected, actual) def check_data(expected, actual, system): """Check data for equality.""" decimal = 7 if system in system_decimal_accuracy_dict: decimal = system_decimal_accuracy_dict[system] np.testing.assert_almost_equal(expected, actual, decimal=decimal) def assert_deep_almost_equal(expected, actual, *args, **kwargs): """ Assert that two complex structures have almost equal contents. 
Compares lists, dicts and tuples recursively. Checks numeric values using test_case's :py:meth:`unittest.TestCase.assertAlmostEqual` and checks all other values with :py:meth:`unittest.TestCase.assertEqual`. Accepts additional positional and keyword arguments and pass those intact to assertAlmostEqual() (that's how you specify comparison precision). This code has been adapted from https://github.com/larsbutler/oq-engine/blob/master/tests/utils/helpers.py """ is_root = '__trace' not in kwargs trace = kwargs.pop('__trace', 'ROOT') if isinstance(expected, np.ndarray) and expected.size == 0: expected = None if isinstance(actual, np.ndarray) and actual.size == 0: actual = None try: if isinstance(expected, (int, float, complex)): np.testing.assert_almost_equal(expected, actual, *args, **kwargs) elif isinstance(expected, (list, tuple, np.ndarray, types.GeneratorType)): if isinstance(expected, types.GeneratorType): expected = list(expected) actual = list(actual) np.testing.assert_equal(len(expected), len(actual)) for index in range(len(expected)): v1, v2 = expected[index], actual[index] assert_deep_almost_equal(v1, v2, __trace=repr(index), *args, **kwargs) elif isinstance(expected, dict): np.testing.assert_equal(set(expected), set(actual)) for key in expected: assert_deep_almost_equal(expected[key], actual[key], __trace=repr(key), *args, **kwargs) else: np.testing.assert_equal(expected, actual) except AssertionError as exc: exc.__dict__.setdefault('traces', []).append(trace) if is_root: trace = ' -> '.join(reversed(exc.traces)) message = '' try: message = exc.message except AttributeError: pass exc = AssertionError("%s\nTRACE: %s" % (message, trace)) raise exc def assert_warning_in_record(warning_message, warn_record): """Assert that a warning message is in the records.""" all_messages = [str(w.message) for w in warn_record] assert warning_message in all_messages
# Author: Denis A. Engemann <denis.engemann@gmail.com> # Victoria Peterson <victoriapeterson09@gmail.com> # License: BSD (3-clause) import numpy as np import pytest from numpy.testing import (assert_array_almost_equal, assert_array_equal) from mne import io from mne.time_frequency import psd_array_welch from mne.decoding.ssd import SSD from mne.utils import requires_sklearn from mne.filter import filter_data from mne import create_info from mne.decoding import CSP freqs_sig = 9, 12 freqs_noise = 8, 13 def simulate_data(freqs_sig=[9, 12], n_trials=100, n_channels=20, n_samples=500, samples_per_second=250, n_components=5, SNR=0.05, random_state=42): """Simulate data according to an instantaneous mixin model. Data are simulated in the statistical source space, where n=n_components sources contain the peak of interest. """ rng = np.random.RandomState(random_state) filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1, fir_design='firwin') # generate an orthogonal mixin matrix mixing_mat = np.linalg.svd(rng.randn(n_channels, n_channels))[0] # define sources S_s = rng.randn(n_trials * n_samples, n_components) # filter source in the specific freq. 
band of interest S_s = filter_data(S_s.T, samples_per_second, **filt_params_signal).T S_n = rng.randn(n_trials * n_samples, n_channels - n_components) S = np.hstack((S_s, S_n)) # mix data X_s = np.dot(mixing_mat[:, :n_components], S_s.T).T X_n = np.dot(mixing_mat[:, n_components:], S_n.T).T # add noise X_s = X_s / np.linalg.norm(X_s, 'fro') X_n = X_n / np.linalg.norm(X_n, 'fro') X = SNR * X_s + (1 - SNR) * X_n X = X.T S = S.T return X, mixing_mat, S @pytest.mark.slowtest def test_ssd(): """Test Common Spatial Patterns algorithm on raw data.""" X, A, S = simulate_data() sf = 250 n_channels = X.shape[0] info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg') n_components_true = 5 # Init filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise) # freq no int freq = 'foo' filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) with pytest.raises(TypeError, match='must be an instance '): ssd = SSD(info, filt_params_signal, filt_params_noise) # Wrongly specified noise band freq = 2 filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) with pytest.raises(ValueError, match='Wrongly specified '): ssd = SSD(info, filt_params_signal, filt_params_noise) # filt param no dict filt_params_signal = freqs_sig filt_params_noise = freqs_noise with pytest.raises(ValueError, match='must be defined'): ssd = SSD(info, filt_params_signal, filt_params_noise) # Data type filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], 
l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise) raw = io.RawArray(X, info) pytest.raises(TypeError, ssd.fit, raw) # More than 1 channel type ch_types = np.reshape([['mag'] * 10, ['eeg'] * 10], n_channels) info_2 = create_info(ch_names=n_channels, sfreq=sf, ch_types=ch_types) with pytest.raises(ValueError, match='At this point SSD'): ssd = SSD(info_2, filt_params_signal, filt_params_noise) # Number of channels info_3 = create_info(ch_names=n_channels + 1, sfreq=sf, ch_types='eeg') ssd = SSD(info_3, filt_params_signal, filt_params_noise) pytest.raises(ValueError, ssd.fit, X) # Fit n_components = 10 ssd = SSD(info, filt_params_signal, filt_params_noise, n_components=n_components) # Call transform before fit pytest.raises(AttributeError, ssd.transform, X) # Check outputs ssd.fit(X) assert (ssd.filters_.shape == (n_channels, n_channels)) assert (ssd.patterns_.shape == (n_channels, n_channels)) # Transform X_ssd = ssd.fit_transform(X) assert (X_ssd.shape[0] == n_components) # back and forward ssd = SSD(info, filt_params_signal, filt_params_noise, n_components=None, sort_by_spectral_ratio=False) ssd.fit(X) X_denoised = ssd.apply(X) assert_array_almost_equal(X_denoised, X) # Power ratio ordering spec_ratio, _ = ssd.get_spectral_ratio(ssd.transform(X)) # since we now that the number of true components is 5, the relative # difference should be low for the first 5 components and then increases index_diff = np.argmax(-np.diff(spec_ratio)) assert index_diff == n_components_true - 1 # Check detected peaks # fit ssd n_components = n_components_true filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise, 
n_components=n_components, sort_by_spectral_ratio=False) ssd.fit(X) out = ssd.transform(X) psd_out, _ = psd_array_welch(out[0], sfreq=250, n_fft=250) psd_S, _ = psd_array_welch(S[0], sfreq=250, n_fft=250) corr = np.abs(np.corrcoef((psd_out, psd_S))[0, 1]) assert np.abs(corr) > 0.95 # Check pattern estimation # Since there is no exact ordering of the recovered patterns # a pair-wise greedy search will be done error = list() for ii in range(n_channels): corr = np.abs(np.corrcoef(ssd.patterns_[ii, :].T, A[:, 0])[0, 1]) error.append(1 - corr) min_err = np.min(error) assert min_err < 0.3 # threshold taken from SSD original paper def test_ssd_epoched_data(): """Test Common Spatial Patterns algorithm on epoched data. Compare the outputs when raw data is used. """ X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500) sf = 250 n_channels = X.shape[0] info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg') n_components_true = 5 # Build epochs as sliding windows over the continuous raw file # Epoch length is 1 second X_e = np.reshape(X, (100, 20, 500)) # Fit filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=4, h_trans_bandwidth=4) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=4, h_trans_bandwidth=4) # ssd on epochs ssd_e = SSD(info, filt_params_signal, filt_params_noise) ssd_e.fit(X_e) # ssd on raw ssd = SSD(info, filt_params_signal, filt_params_noise) ssd.fit(X) # Check if the 5 first 5 components are the same for both _, sorter_spec_e = ssd_e.get_spectral_ratio(ssd_e.transform(X_e)) _, sorter_spec = ssd.get_spectral_ratio(ssd.transform(X)) assert_array_equal(sorter_spec_e[:n_components_true], sorter_spec[:n_components_true]) @requires_sklearn def test_ssd_pipeline(): """Test if SSD works in a pipeline.""" from sklearn.pipeline import Pipeline sf = 250 X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500) X_e = np.reshape(X, (100, 20, 500)) # define bynary 
random output y = np.random.randint(2, size=100) info = create_info(ch_names=20, sfreq=sf, ch_types='eeg') filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=4, h_trans_bandwidth=4) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=4, h_trans_bandwidth=4) ssd = SSD(info, filt_params_signal, filt_params_noise) csp = CSP() pipe = Pipeline([('SSD', ssd), ('CSP', csp)]) pipe.set_params(SSD__n_components=5) pipe.set_params(CSP__n_components=2) out = pipe.fit_transform(X_e, y) assert (out.shape == (100, 2)) assert (pipe.get_params()['SSD__n_components'] == 5)
olafhauk/mne-python
mne/decoding/tests/test_ssd.py
mne/io/fieldtrip/tests/helpers.py
# Authors: Eric Larson <larson.eric.d@gmail.com> # License: BSD Style. from .commands.utils import main if __name__ == '__main__': main()
# Author: Denis A. Engemann <denis.engemann@gmail.com> # Victoria Peterson <victoriapeterson09@gmail.com> # License: BSD (3-clause) import numpy as np import pytest from numpy.testing import (assert_array_almost_equal, assert_array_equal) from mne import io from mne.time_frequency import psd_array_welch from mne.decoding.ssd import SSD from mne.utils import requires_sklearn from mne.filter import filter_data from mne import create_info from mne.decoding import CSP freqs_sig = 9, 12 freqs_noise = 8, 13 def simulate_data(freqs_sig=[9, 12], n_trials=100, n_channels=20, n_samples=500, samples_per_second=250, n_components=5, SNR=0.05, random_state=42): """Simulate data according to an instantaneous mixin model. Data are simulated in the statistical source space, where n=n_components sources contain the peak of interest. """ rng = np.random.RandomState(random_state) filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1, fir_design='firwin') # generate an orthogonal mixin matrix mixing_mat = np.linalg.svd(rng.randn(n_channels, n_channels))[0] # define sources S_s = rng.randn(n_trials * n_samples, n_components) # filter source in the specific freq. 
band of interest S_s = filter_data(S_s.T, samples_per_second, **filt_params_signal).T S_n = rng.randn(n_trials * n_samples, n_channels - n_components) S = np.hstack((S_s, S_n)) # mix data X_s = np.dot(mixing_mat[:, :n_components], S_s.T).T X_n = np.dot(mixing_mat[:, n_components:], S_n.T).T # add noise X_s = X_s / np.linalg.norm(X_s, 'fro') X_n = X_n / np.linalg.norm(X_n, 'fro') X = SNR * X_s + (1 - SNR) * X_n X = X.T S = S.T return X, mixing_mat, S @pytest.mark.slowtest def test_ssd(): """Test Common Spatial Patterns algorithm on raw data.""" X, A, S = simulate_data() sf = 250 n_channels = X.shape[0] info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg') n_components_true = 5 # Init filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise) # freq no int freq = 'foo' filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) with pytest.raises(TypeError, match='must be an instance '): ssd = SSD(info, filt_params_signal, filt_params_noise) # Wrongly specified noise band freq = 2 filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) with pytest.raises(ValueError, match='Wrongly specified '): ssd = SSD(info, filt_params_signal, filt_params_noise) # filt param no dict filt_params_signal = freqs_sig filt_params_noise = freqs_noise with pytest.raises(ValueError, match='must be defined'): ssd = SSD(info, filt_params_signal, filt_params_noise) # Data type filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], 
l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise) raw = io.RawArray(X, info) pytest.raises(TypeError, ssd.fit, raw) # More than 1 channel type ch_types = np.reshape([['mag'] * 10, ['eeg'] * 10], n_channels) info_2 = create_info(ch_names=n_channels, sfreq=sf, ch_types=ch_types) with pytest.raises(ValueError, match='At this point SSD'): ssd = SSD(info_2, filt_params_signal, filt_params_noise) # Number of channels info_3 = create_info(ch_names=n_channels + 1, sfreq=sf, ch_types='eeg') ssd = SSD(info_3, filt_params_signal, filt_params_noise) pytest.raises(ValueError, ssd.fit, X) # Fit n_components = 10 ssd = SSD(info, filt_params_signal, filt_params_noise, n_components=n_components) # Call transform before fit pytest.raises(AttributeError, ssd.transform, X) # Check outputs ssd.fit(X) assert (ssd.filters_.shape == (n_channels, n_channels)) assert (ssd.patterns_.shape == (n_channels, n_channels)) # Transform X_ssd = ssd.fit_transform(X) assert (X_ssd.shape[0] == n_components) # back and forward ssd = SSD(info, filt_params_signal, filt_params_noise, n_components=None, sort_by_spectral_ratio=False) ssd.fit(X) X_denoised = ssd.apply(X) assert_array_almost_equal(X_denoised, X) # Power ratio ordering spec_ratio, _ = ssd.get_spectral_ratio(ssd.transform(X)) # since we now that the number of true components is 5, the relative # difference should be low for the first 5 components and then increases index_diff = np.argmax(-np.diff(spec_ratio)) assert index_diff == n_components_true - 1 # Check detected peaks # fit ssd n_components = n_components_true filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=1, h_trans_bandwidth=1) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=1, h_trans_bandwidth=1) ssd = SSD(info, filt_params_signal, filt_params_noise, 
n_components=n_components, sort_by_spectral_ratio=False) ssd.fit(X) out = ssd.transform(X) psd_out, _ = psd_array_welch(out[0], sfreq=250, n_fft=250) psd_S, _ = psd_array_welch(S[0], sfreq=250, n_fft=250) corr = np.abs(np.corrcoef((psd_out, psd_S))[0, 1]) assert np.abs(corr) > 0.95 # Check pattern estimation # Since there is no exact ordering of the recovered patterns # a pair-wise greedy search will be done error = list() for ii in range(n_channels): corr = np.abs(np.corrcoef(ssd.patterns_[ii, :].T, A[:, 0])[0, 1]) error.append(1 - corr) min_err = np.min(error) assert min_err < 0.3 # threshold taken from SSD original paper def test_ssd_epoched_data(): """Test Common Spatial Patterns algorithm on epoched data. Compare the outputs when raw data is used. """ X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500) sf = 250 n_channels = X.shape[0] info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg') n_components_true = 5 # Build epochs as sliding windows over the continuous raw file # Epoch length is 1 second X_e = np.reshape(X, (100, 20, 500)) # Fit filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=4, h_trans_bandwidth=4) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=4, h_trans_bandwidth=4) # ssd on epochs ssd_e = SSD(info, filt_params_signal, filt_params_noise) ssd_e.fit(X_e) # ssd on raw ssd = SSD(info, filt_params_signal, filt_params_noise) ssd.fit(X) # Check if the 5 first 5 components are the same for both _, sorter_spec_e = ssd_e.get_spectral_ratio(ssd_e.transform(X_e)) _, sorter_spec = ssd.get_spectral_ratio(ssd.transform(X)) assert_array_equal(sorter_spec_e[:n_components_true], sorter_spec[:n_components_true]) @requires_sklearn def test_ssd_pipeline(): """Test if SSD works in a pipeline.""" from sklearn.pipeline import Pipeline sf = 250 X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500) X_e = np.reshape(X, (100, 20, 500)) # define bynary 
random output y = np.random.randint(2, size=100) info = create_info(ch_names=20, sfreq=sf, ch_types='eeg') filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1], l_trans_bandwidth=4, h_trans_bandwidth=4) filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1], l_trans_bandwidth=4, h_trans_bandwidth=4) ssd = SSD(info, filt_params_signal, filt_params_noise) csp = CSP() pipe = Pipeline([('SSD', ssd), ('CSP', csp)]) pipe.set_params(SSD__n_components=5) pipe.set_params(CSP__n_components=2) out = pipe.fit_transform(X_e, y) assert (out.shape == (100, 2)) assert (pipe.get_params()['SSD__n_components'] == 5)
olafhauk/mne-python
mne/decoding/tests/test_ssd.py
mne/__main__.py
"""Config flow for Mobile App.""" import uuid from homeassistant import config_entries from homeassistant.components import person from homeassistant.helpers import entity_registry from .const import ATTR_APP_ID, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, CONF_USER_ID, DOMAIN @config_entries.HANDLERS.register(DOMAIN) class MobileAppFlowHandler(config_entries.ConfigFlow): """Handle a Mobile App config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" } return self.async_abort( reason="install_app", description_placeholders=placeholders ) async def async_step_registration(self, user_input=None): """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. await self.async_set_unique_id( f"{user_input[ATTR_APP_ID]}-{user_input[ATTR_DEVICE_ID]}" ) else: user_input[ATTR_DEVICE_ID] = str(uuid.uuid4()).replace("-", "") # Register device tracker entity and add to person registering app ent_reg = await entity_registry.async_get_registry(self.hass) devt_entry = ent_reg.async_get_or_create( "device_tracker", DOMAIN, user_input[ATTR_DEVICE_ID], suggested_object_id=user_input[ATTR_DEVICE_NAME], ) await person.async_add_user_device_tracker( self.hass, user_input[CONF_USER_ID], devt_entry.entity_id ) return self.async_create_entry( title=user_input[ATTR_DEVICE_NAME], data=user_input )
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/mobile_app/config_flow.py
"""Config flow for Elexa Guardian integration.""" from aioguardian import Client from aioguardian.errors import GuardianError import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import callback from .const import CONF_UID, DOMAIN, LOGGER # pylint:disable=unused-import DATA_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PORT, default=7777): int} ) UNIQUE_ID = "guardian_{0}" @callback def async_get_pin_from_discovery_hostname(hostname): """Get the device's 4-digit PIN from its zeroconf-discovered hostname.""" return hostname.split(".")[0].split("-")[1] @callback def async_get_pin_from_uid(uid): """Get the device's 4-digit PIN from its UID.""" return uid[-4:] async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ async with Client(data[CONF_IP_ADDRESS]) as client: ping_data = await client.system.ping() return { CONF_UID: ping_data["data"]["uid"], } class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elexa Guardian.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize.""" self.discovery_info = {} async def _async_set_unique_id(self, pin): """Set the config entry's unique ID (based on the device's 4-digit PIN).""" await self.async_set_unique_id(UNIQUE_ID.format(pin)) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle configuration via the UI.""" if user_input is None: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={} ) try: info = await validate_input(self.hass, user_input) except GuardianError as err: LOGGER.error("Error while connecting to unit: %s", err) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={CONF_IP_ADDRESS: 
"cannot_connect"}, ) pin = async_get_pin_from_uid(info[CONF_UID]) await self._async_set_unique_id(pin) return self.async_create_entry( title=info[CONF_UID], data={CONF_UID: info["uid"], **user_input} ) async def async_step_zeroconf(self, discovery_info): """Handle the configuration via zeroconf.""" if discovery_info is None: return self.async_abort(reason="connection_error") pin = async_get_pin_from_discovery_hostname(discovery_info["hostname"]) await self._async_set_unique_id(pin) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context[CONF_IP_ADDRESS] = discovery_info["host"] if any( discovery_info["host"] == flow["context"][CONF_IP_ADDRESS] for flow in self._async_in_progress() ): return self.async_abort(reason="already_in_progress") self.discovery_info = { CONF_IP_ADDRESS: discovery_info["host"], CONF_PORT: discovery_info["port"], } return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm(self, user_input=None): """Finish the configuration via zeroconf.""" if user_input is None: return self.async_show_form(step_id="zeroconf_confirm") return await self.async_step_user(self.discovery_info)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/guardian/config_flow.py
"""Config flow for Monoprice 6-Zone Amplifier integration.""" import logging from pymonoprice import get_async_monoprice from serial import SerialException import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import CONF_PORT from .const import ( CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, CONF_SOURCES, ) from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) SOURCES = [ CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, ] OPTIONS_FOR_DATA = {vol.Optional(source): str for source in SOURCES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_PORT): str, **OPTIONS_FOR_DATA}) @core.callback def _sources_from_config(data): sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } return { index: name.strip() for index, name in sources_config.items() if (name is not None and name.strip() != "") } async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ try: await get_async_monoprice(data[CONF_PORT], hass.loop) except SerialException: _LOGGER.error("Error connecting to Monoprice controller") raise CannotConnect sources = _sources_from_config(data) # Return info that you want to store in the config entry. 
return {CONF_PORT: data[CONF_PORT], CONF_SOURCES: sources} class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Monoprice 6-Zone Amplifier.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(self.hass, user_input) return self.async_create_entry(title=user_input[CONF_PORT], data=info) except CannotConnect: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) @staticmethod @core.callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return MonopriceOptionsFlowHandler(config_entry) @core.callback def _key_for_source(index, source, previous_sources): if str(index) in previous_sources: key = vol.Optional( source, description={"suggested_value": previous_sources[str(index)]} ) else: key = vol.Optional(source) return key class MonopriceOptionsFlowHandler(config_entries.OptionsFlow): """Handle a Monoprice options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry @core.callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: previous = self.config_entry.options[CONF_SOURCES] else: previous = self.config_entry.data[CONF_SOURCES] return previous async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry( title="", data={CONF_SOURCES: _sources_from_config(user_input)} ) previous_sources = self._previous_sources() options = { _key_for_source(idx + 1, source, previous_sources): str for idx, source in enumerate(SOURCES) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options),) class 
CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect."""
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/monoprice/config_flow.py
"""Support for Gogogate2 garage Doors.""" import logging from typing import Callable, List, Optional from gogogate2_api.common import Door, DoorStatus, get_configured_doors, get_door_by_id import voluptuous as vol from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .common import ( GogoGateDataUpdateCoordinator, cover_unique_id, get_data_update_coordinator, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) COVER_SCHEMA = vol.Schema( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: dict, add_entities: Callable, discovery_info=None ) -> None: """Convert old style file configs to new style configs.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) ) async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable[[List[Entity], Optional[bool]], None], ) -> None: """Set up the config entry.""" data_update_coordinator = get_data_update_coordinator(hass, config_entry) async_add_entities( [ Gogogate2Cover(config_entry, data_update_coordinator, door) for door in get_configured_doors(data_update_coordinator.data) ] ) class Gogogate2Cover(CoverEntity): """Cover entity for goggate2.""" def __init__( self, config_entry: ConfigEntry, data_update_coordinator: GogoGateDataUpdateCoordinator, door: Door, ) -> None: """Initialize the object.""" self._config_entry = config_entry self._data_update_coordinator = data_update_coordinator self._door = door self._api = 
data_update_coordinator.api self._unique_id = cover_unique_id(config_entry, door) self._is_available = True @property def available(self) -> bool: """Return True if entity is available.""" return self._is_available @property def should_poll(self) -> bool: """Return False as the data manager handles dispatching data.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the door.""" return self._door.name @property def is_closed(self): """Return true if cover is closed, else False.""" if self._door.status == DoorStatus.OPENED: return False if self._door.status == DoorStatus.CLOSED: return True return None @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_GARAGE @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE async def async_open_cover(self, **kwargs): """Open the door.""" await self.hass.async_add_executor_job(self._api.open_door, self._door.door_id) async def async_close_cover(self, **kwargs): """Close the door.""" await self.hass.async_add_executor_job(self._api.close_door, self._door.door_id) @property def state_attributes(self): """Return the state attributes.""" attrs = super().state_attributes attrs["door_id"] = self._door.door_id return attrs @callback def async_on_data_updated(self) -> None: """Receive data from data dispatcher.""" if not self._data_update_coordinator.last_update_success: self._is_available = False self.async_write_ha_state() return door = get_door_by_id(self._door.door_id, self._data_update_coordinator.data) # Set the state. self._door = door self._is_available = True self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Register update dispatcher.""" self.async_on_remove( self._data_update_coordinator.async_add_listener(self.async_on_data_updated) )
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/gogogate2/cover.py
"""Allows to configure a switch using RPi GPIO.""" import logging import voluptuous as vol from homeassistant.components import rpi_gpio from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity _LOGGER = logging.getLogger(__name__) CONF_PULL_MODE = "pull_mode" CONF_PORTS = "ports" CONF_INVERT_LOGIC = "invert_logic" DEFAULT_INVERT_LOGIC = False _SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PORTS): _SWITCHES_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Raspberry PI GPIO devices.""" invert_logic = config.get(CONF_INVERT_LOGIC) switches = [] ports = config.get(CONF_PORTS) for port, name in ports.items(): switches.append(RPiGPIOSwitch(name, port, invert_logic)) add_entities(switches) class RPiGPIOSwitch(ToggleEntity): """Representation of a Raspberry Pi GPIO.""" def __init__(self, name, port, invert_logic): """Initialize the pin.""" self._name = name or DEVICE_DEFAULT_NAME self._port = port self._invert_logic = invert_logic self._state = False rpi_gpio.setup_output(self._port) rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" rpi_gpio.write_output(self._port, 0 if self._invert_logic else 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) self._state = False self.schedule_update_ha_state()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/rpi_gpio/switch.py
"""Support for the OpenWeatherMap (OWM) service.""" from datetime import timedelta import logging from pyowm import OWM from pyowm.exceptions.api_call_error import APICallError import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, PLATFORM_SCHEMA, WeatherEntity, ) from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME, PRESSURE_HPA, PRESSURE_INHG, STATE_UNKNOWN, TEMP_CELSIUS, ) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.util.pressure import convert as convert_pressure _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by OpenWeatherMap" FORECAST_MODE = ["hourly", "daily", "freedaily"] DEFAULT_NAME = "OpenWeatherMap" MIN_TIME_BETWEEN_FORECAST_UPDATES = timedelta(minutes=30) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) CONDITION_CLASSES = { "cloudy": [803, 804], "fog": [701, 741], "hail": [906], "lightning": [210, 211, 212, 221], "lightning-rainy": [200, 201, 202, 230, 231, 232], "partlycloudy": [801, 802], "pouring": [504, 314, 502, 503, 522], "rainy": [300, 301, 302, 310, 311, 312, 313, 500, 501, 520, 521], "snowy": [600, 601, 602, 611, 612, 620, 621, 622], "snowy-rainy": [511, 615, 616], "sunny": [800], "windy": [905, 951, 952, 953, 954, 955, 956, 957], "windy-variant": [958, 959, 960, 961], "exceptional": [711, 721, 731, 751, 761, 762, 771, 900, 901, 962, 903, 904], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the 
OpenWeatherMap weather platform.""" longitude = config.get(CONF_LONGITUDE, round(hass.config.longitude, 5)) latitude = config.get(CONF_LATITUDE, round(hass.config.latitude, 5)) name = config.get(CONF_NAME) mode = config.get(CONF_MODE) try: owm = OWM(config.get(CONF_API_KEY)) except APICallError: _LOGGER.error("Error while connecting to OpenWeatherMap") return False data = WeatherData(owm, latitude, longitude, mode) add_entities( [OpenWeatherMapWeather(name, data, hass.config.units.temperature_unit, mode)], True, ) class OpenWeatherMapWeather(WeatherEntity): """Implementation of an OpenWeatherMap sensor.""" def __init__(self, name, owm, temperature_unit, mode): """Initialize the sensor.""" self._name = name self._owm = owm self._temperature_unit = temperature_unit self._mode = mode self.data = None self.forecast_data = None @property def name(self): """Return the name of the sensor.""" return self._name @property def condition(self): """Return the current condition.""" try: return [ k for k, v in CONDITION_CLASSES.items() if self.data.get_weather_code() in v ][0] except IndexError: return STATE_UNKNOWN @property def temperature(self): """Return the temperature.""" return self.data.get_temperature("celsius").get("temp") @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def pressure(self): """Return the pressure.""" pressure = self.data.get_pressure().get("press") if self.hass.config.units.name == "imperial": return round(convert_pressure(pressure, PRESSURE_HPA, PRESSURE_INHG), 2) return pressure @property def humidity(self): """Return the humidity.""" return self.data.get_humidity() @property def wind_speed(self): """Return the wind speed.""" if self.hass.config.units.name == "imperial": return round(self.data.get_wind().get("speed") * 2.24, 2) return round(self.data.get_wind().get("speed") * 3.6, 2) @property def wind_bearing(self): """Return the wind bearing.""" return self.data.get_wind().get("deg") @property 
def attribution(self): """Return the attribution.""" return ATTRIBUTION @property def forecast(self): """Return the forecast array.""" data = [] def calc_precipitation(rain, snow): """Calculate the precipitation.""" rain_value = 0 if rain is None else rain snow_value = 0 if snow is None else snow if round(rain_value + snow_value, 1) == 0: return None return round(rain_value + snow_value, 1) if self._mode == "freedaily": weather = self.forecast_data.get_weathers()[::8] else: weather = self.forecast_data.get_weathers() for entry in weather: if self._mode == "daily": data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("day"), ATTR_FORECAST_TEMP_LOW: entry.get_temperature("celsius").get( "night" ), ATTR_FORECAST_PRECIPITATION: calc_precipitation( entry.get_rain().get("all"), entry.get_snow().get("all") ), ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"), ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) else: data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get( "temp" ), ATTR_FORECAST_PRECIPITATION: ( round(entry.get_rain().get("3h"), 1) if entry.get_rain().get("3h") is not None and (round(entry.get_rain().get("3h"), 1) > 0) else None ), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) return data def update(self): """Get the latest data from OWM and updates the states.""" try: self._owm.update() self._owm.update_forecast() except APICallError: _LOGGER.error("Exception when calling OWM web API to update data") return self.data = self._owm.data self.forecast_data = self._owm.forecast_data class WeatherData: """Get the latest data from OpenWeatherMap.""" def __init__(self, owm, latitude, longitude, mode): """Initialize the data 
object.""" self._mode = mode self.owm = owm self.latitude = latitude self.longitude = longitude self.data = None self.forecast_data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from OpenWeatherMap.""" obs = self.owm.weather_at_coords(self.latitude, self.longitude) if obs is None: _LOGGER.warning("Failed to fetch data from OWM") return self.data = obs.get_weather() @Throttle(MIN_TIME_BETWEEN_FORECAST_UPDATES) def update_forecast(self): """Get the latest forecast from OpenWeatherMap.""" try: if self._mode == "daily": fcd = self.owm.daily_forecast_at_coords( self.latitude, self.longitude, 15 ) else: fcd = self.owm.three_hours_forecast_at_coords( self.latitude, self.longitude ) except APICallError: _LOGGER.error("Exception when calling OWM web API to update forecast") return if fcd is None: _LOGGER.warning("Failed to fetch forecast data from OWM") return self.forecast_data = fcd.get_forecast()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/openweathermap/weather.py
"""Syslog notification service.""" import logging import syslog import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) CONF_FACILITY = "facility" CONF_OPTION = "option" CONF_PRIORITY = "priority" SYSLOG_FACILITY = { "kernel": "LOG_KERN", "user": "LOG_USER", "mail": "LOG_MAIL", "daemon": "LOG_DAEMON", "auth": "LOG_KERN", "LPR": "LOG_LPR", "news": "LOG_NEWS", "uucp": "LOG_UUCP", "cron": "LOG_CRON", "syslog": "LOG_SYSLOG", "local0": "LOG_LOCAL0", "local1": "LOG_LOCAL1", "local2": "LOG_LOCAL2", "local3": "LOG_LOCAL3", "local4": "LOG_LOCAL4", "local5": "LOG_LOCAL5", "local6": "LOG_LOCAL6", "local7": "LOG_LOCAL7", } SYSLOG_OPTION = { "pid": "LOG_PID", "cons": "LOG_CONS", "ndelay": "LOG_NDELAY", "nowait": "LOG_NOWAIT", "perror": "LOG_PERROR", } SYSLOG_PRIORITY = { 5: "LOG_EMERG", 4: "LOG_ALERT", 3: "LOG_CRIT", 2: "LOG_ERR", 1: "LOG_WARNING", 0: "LOG_NOTICE", -1: "LOG_INFO", -2: "LOG_DEBUG", } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_FACILITY, default="syslog"): vol.In(SYSLOG_FACILITY.keys()), vol.Optional(CONF_OPTION, default="pid"): vol.In(SYSLOG_OPTION.keys()), vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()), } ) def get_service(hass, config, discovery_info=None): """Get the syslog notification service.""" facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)]) option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)]) priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)]) return SyslogNotificationService(facility, option, priority) class SyslogNotificationService(BaseNotificationService): """Implement the syslog notification service.""" def __init__(self, facility, option, priority): """Initialize the service.""" self._facility = facility self._option = option self._priority = priority def send_message(self, message="", **kwargs): """Send a message to a 
user.""" title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) syslog.openlog(title, self._option, self._facility) syslog.syslog(self._priority, message) syslog.closelog()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/syslog/notify.py
"""Config flow to configure the RainMachine component.""" from regenmaschine import login from regenmaschine.errors import RainMachineError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, ) from homeassistant.helpers import aiohttp_client from .const import ( # pylint: disable=unused-import CONF_ZONE_RUN_TIME, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_ZONE_RUN, DOMAIN, ) class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a RainMachine config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.data_schema = vol.Schema( { vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, } ) async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form( step_id="user", data_schema=self.data_schema, errors=errors if errors else {}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return await self._show_form() await self.async_set_unique_id(user_input[CONF_IP_ADDRESS]) self._abort_if_unique_id_configured() websession = aiohttp_client.async_get_clientsession(self.hass) try: await login( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], websession, port=user_input[CONF_PORT], ssl=user_input.get(CONF_SSL, True), ) except RainMachineError: return await self._show_form({CONF_PASSWORD: "invalid_credentials"}) # Unfortunately, RainMachine doesn't provide a way to refresh the # access token without using the IP address and password, so we have to # store it: return self.async_create_entry( title=user_input[CONF_IP_ADDRESS], 
data={ CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input[CONF_PORT], CONF_SSL: user_input.get(CONF_SSL, True), CONF_SCAN_INTERVAL: user_input.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds() ), CONF_ZONE_RUN_TIME: user_input.get( CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN ), }, )
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/rainmachine/config_flow.py
"""Tracks the latency of a host by sending ICMP echo requests (ping).""" from datetime import timedelta import logging import re import subprocess import sys import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" CONF_PING_COUNT = "count" DEFAULT_NAME = "Ping Binary sensor" DEFAULT_PING_COUNT = 5 DEFAULT_DEVICE_CLASS = "connectivity" SCAN_INTERVAL = timedelta(minutes=5) PING_MATCHER = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)" ) PING_MATCHER_BUSYBOX = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)" ) WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms") PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Ping Binary sensor.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) count = config.get(CONF_PING_COUNT) add_entities([PingBinarySensor(name, PingData(host, count))], True) class PingBinarySensor(BinarySensorEntity): """Representation of a Ping Binary sensor.""" def __init__(self, name, ping): """Initialize the Ping Binary sensor.""" self._name = name self.ping = ping @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return DEFAULT_DEVICE_CLASS @property def is_on(self): """Return true if the binary sensor is on.""" return 
self.ping.available @property def device_state_attributes(self): """Return the state attributes of the ICMP checo request.""" if self.ping.data is not False: return { ATTR_ROUND_TRIP_TIME_AVG: self.ping.data["avg"], ATTR_ROUND_TRIP_TIME_MAX: self.ping.data["max"], ATTR_ROUND_TRIP_TIME_MDEV: self.ping.data["mdev"], ATTR_ROUND_TRIP_TIME_MIN: self.ping.data["min"], } def update(self): """Get the latest data.""" self.ping.update() class PingData: """The Class for handling the data retrieval.""" def __init__(self, host, count): """Initialize the data object.""" self._ip_address = host self._count = count self.data = {} self.available = False if sys.platform == "win32": self._ping_cmd = [ "ping", "-n", str(self._count), "-w", "1000", self._ip_address, ] else: self._ping_cmd = [ "ping", "-n", "-q", "-c", str(self._count), "-W1", self._ip_address, ] def ping(self): """Send ICMP echo request and return details if success.""" pinger = subprocess.Popen( self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) try: out = pinger.communicate() _LOGGER.debug("Output is %s", str(out)) if sys.platform == "win32": match = WIN32_PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} if "max/" not in str(out): match = PING_MATCHER_BUSYBOX.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} match = PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev} except (subprocess.CalledProcessError, AttributeError): return False def update(self): """Retrieve the latest details from the host.""" self.data = self.ping() self.available = bool(self.data)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/ping/binary_sensor.py
"""Support for Volvo heater.""" import logging from homeassistant.helpers.entity import ToggleEntity from . import DATA_KEY, VolvoEntity _LOGGER = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a Volvo switch.""" if discovery_info is None: return async_add_entities([VolvoSwitch(hass.data[DATA_KEY], *discovery_info)]) class VolvoSwitch(VolvoEntity, ToggleEntity): """Representation of a Volvo switch.""" @property def is_on(self): """Return true if switch is on.""" return self.instrument.state async def async_turn_on(self, **kwargs): """Turn the switch on.""" await self.instrument.turn_on() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the switch off.""" await self.instrument.turn_off() self.async_write_ha_state()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/volvooncall/switch.py
"""Support for MySensors lights.""" from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import callback import homeassistant.util.color as color_util from homeassistant.util.color import rgb_hex_to_rgb_list SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { "S_DIMMER": MySensorsLightDimmer, "S_RGB_LIGHT": MySensorsLightRGB, "S_RGBW_LIGHT": MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities, ) class MySensorsLight(mysensors.device.MySensorsEntity, LightEntity): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state 
self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ( ATTR_BRIGHTNESS not in kwargs or kwargs[ATTR_BRIGHTNESS] == self._brightness or set_req.V_DIMMER not in self._values ): return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == "%02x%02x%02x%02x": if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = color_util.color_RGB_to_hs(*rgb) self._white = white self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value(self.node_id, self.child_id, value_type, 0, ack=1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_write_ha_state() @callback def _async_update_light(self): 
"""Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON @callback def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False @callback def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() 
self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/mysensors/light.py
"""An abstract class common to all Bond entities.""" from abc import abstractmethod from asyncio import TimeoutError as AsyncIOTimeoutError import logging from typing import Any, Dict, Optional from aiohttp import ClientError from homeassistant.const import ATTR_NAME from homeassistant.helpers.entity import Entity from .const import DOMAIN from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) class BondEntity(Entity): """Generic Bond entity encapsulating common features of any Bond controlled device.""" def __init__(self, hub: BondHub, device: BondDevice): """Initialize entity with API and device info.""" self._hub = hub self._device = device self._available = True @property def unique_id(self) -> Optional[str]: """Get unique ID for the entity.""" return self._device.device_id @property def name(self) -> Optional[str]: """Get entity name.""" return self._device.name @property def device_info(self) -> Optional[Dict[str, Any]]: """Get a an HA device representing this Bond controlled device.""" return { ATTR_NAME: self.name, "identifiers": {(DOMAIN, self._device.device_id)}, "via_device": (DOMAIN, self._hub.bond_id), } @property def assumed_state(self) -> bool: """Let HA know this entity relies on an assumed state tracked by Bond.""" return True @property def available(self) -> bool: """Report availability of this entity based on last API call results.""" return self._available async def async_update(self): """Fetch assumed state of the cover from the hub using API.""" try: state: dict = await self._hub.bond.device_state(self._device.device_id) except (ClientError, AsyncIOTimeoutError, OSError) as error: if self._available: _LOGGER.warning( "Entity %s has become unavailable", self.entity_id, exc_info=error ) self._available = False else: if not self._available: _LOGGER.info("Entity %s has come back", self.entity_id) self._available = True self._apply_state(state) @abstractmethod def _apply_state(self, state: dict): raise NotImplementedError
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/bond/entity.py
"""Support for MyChevy.""" from datetime import timedelta import logging import threading import time import mychevy.mychevy as mc import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import config_validation as cv, discovery from homeassistant.util import Throttle DOMAIN = "mychevy" UPDATE_TOPIC = DOMAIN ERROR_TOPIC = f"{DOMAIN}_error" MYCHEVY_SUCCESS = "success" MYCHEVY_ERROR = "error" NOTIFICATION_ID = "mychevy_website_notification" NOTIFICATION_TITLE = "MyChevy website status" _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) ERROR_SLEEP_TIME = timedelta(minutes=30) CONF_COUNTRY = "country" DEFAULT_COUNTRY = "us" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_COUNTRY, default=DEFAULT_COUNTRY): vol.All( cv.string, vol.In(["us", "ca"]) ), } ) }, extra=vol.ALLOW_EXTRA, ) class EVSensorConfig: """The EV sensor configuration.""" def __init__( self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None ): """Create new sensor configuration.""" self.name = name self.attr = attr self.extra_attrs = extra_attrs or [] self.unit_of_measurement = unit_of_measurement self.icon = icon class EVBinarySensorConfig: """The EV binary sensor configuration.""" def __init__(self, name, attr, device_class=None): """Create new binary sensor configuration.""" self.name = name self.attr = attr self.device_class = device_class def setup(hass, base_config): """Set up the mychevy component.""" config = base_config.get(DOMAIN) email = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) country = config.get(CONF_COUNTRY) hass.data[DOMAIN] = MyChevyHub( mc.MyChevy(email, password, country), hass, base_config ) hass.data[DOMAIN].start() return True class MyChevyHub(threading.Thread): """MyChevy Hub. 
Connecting to the mychevy website is done through a selenium webscraping process. That can only run synchronously. In order to prevent blocking of other parts of Home Assistant the architecture launches a polling loop in a thread. When new data is received, sensors are updated, and hass is signaled that there are updates. Sensors are not created until the first update, which will be 60 - 120 seconds after the platform starts. """ def __init__(self, client, hass, hass_config): """Initialize MyChevy Hub.""" super().__init__() self._client = client self.hass = hass self.hass_config = hass_config self.cars = [] self.status = None self.ready = False @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update sensors from mychevy website. This is a synchronous polling call that takes a very long time (like 2 to 3 minutes long time) """ self._client.login() self._client.get_cars() self.cars = self._client.cars if self.ready is not True: discovery.load_platform(self.hass, "sensor", DOMAIN, {}, self.hass_config) discovery.load_platform( self.hass, "binary_sensor", DOMAIN, {}, self.hass_config ) self.ready = True self.cars = self._client.update_cars() def get_car(self, vid): """Compatibility to work with one car.""" if self.cars: for car in self.cars: if car.vid == vid: return car return None def run(self): """Thread run loop.""" # We add the status device first outside of the loop # And then busy wait on threads while True: try: _LOGGER.info("Starting mychevy loop") self.update() self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error updating mychevy data. " "This probably means the OnStar link is down again" ) self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC) time.sleep(ERROR_SLEEP_TIME.seconds)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/mychevy/__init__.py
"""The BleBox devices integration.""" import asyncio import logging from blebox_uniapi.error import Error from blebox_uniapi.products import Products from blebox_uniapi.session import ApiHost from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT _LOGGER = logging.getLogger(__name__) PLATFORMS = ["cover", "sensor", "switch", "air_quality", "light", "climate"] PARALLEL_UPDATES = 0 async def async_setup(hass: HomeAssistant, config: dict): """Set up the BleBox devices component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up BleBox devices from a config entry.""" websession = async_get_clientsession(hass) host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] timeout = DEFAULT_SETUP_TIMEOUT api_host = ApiHost(host, port, timeout, websession, hass.loop) try: product = await Products.async_from_host(api_host) except Error as ex: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex domain = hass.data.setdefault(DOMAIN, {}) domain_entry = domain.setdefault(entry.entry_id, {}) product = domain_entry.setdefault(PRODUCT, product) for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok @callback def create_blebox_entities( hass, config_entry, 
async_add_entities, entity_klass, entity_type ): """Create entities from a BleBox product's features.""" product = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [] if entity_type in product.features: for feature in product.features[entity_type]: entities.append(entity_klass(feature)) async_add_entities(entities, True) class BleBoxEntity(Entity): """Implements a common class for entities representing a BleBox feature.""" def __init__(self, feature): """Initialize a BleBox entity.""" self._feature = feature @property def name(self): """Return the internal entity name.""" return self._feature.full_name @property def unique_id(self): """Return a unique id.""" return self._feature.unique_id async def async_update(self): """Update the entity state.""" try: await self._feature.async_update() except Error as ex: _LOGGER.error("Updating '%s' failed: %s", self.name, ex) @property def device_info(self): """Return device information for this entity.""" product = self._feature.product return { "identifiers": {(DOMAIN, product.unique_id)}, "name": product.name, "manufacturer": product.brand, "model": product.model, "sw_version": product.firmware_version, }
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
    # Tail of a test whose `def` lies above this chunk: after setting up the
    # logger integration, its two services must both expose description/fields.
    async_setup_component(hass, logger.DOMAIN, logger_config)

    descriptions = await service.async_get_all_descriptions(hass)

    assert len(descriptions) == 2
    assert "description" in descriptions[logger.DOMAIN]["set_level"]
    assert "fields" in descriptions[logger.DOMAIN]["set_level"]


async def test_call_with_required_features(hass, mock_entities):
    """Test service calls invoked only if entity has required features."""
    test_service_mock = AsyncMock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        test_service_mock,
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
        required_features=[SUPPORT_A],
    )

    # Only the two entities advertising SUPPORT_A may be called.
    assert test_service_mock.call_count == 2
    expected = [
        mock_entities["light.kitchen"],
        mock_entities["light.bedroom"],
    ]
    actual = [call[0][0] for call in test_service_mock.call_args_list]
    assert all(entity in actual for entity in expected)


async def test_call_with_both_required_features(hass, mock_entities):
    """Test service calls invoked only if entity has both features."""
    test_service_mock = AsyncMock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        test_service_mock,
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
        required_features=[SUPPORT_A | SUPPORT_B],
    )

    # A single bitmask entry means the entity must support *all* of its bits.
    assert test_service_mock.call_count == 1
    assert [call[0][0] for call in test_service_mock.call_args_list] == [
        mock_entities["light.bedroom"]
    ]


async def test_call_with_one_of_required_features(hass, mock_entities):
    """Test service calls invoked with one entity having the required features."""
    test_service_mock = AsyncMock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        test_service_mock,
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
        required_features=[SUPPORT_A, SUPPORT_C],
    )

    # Multiple list entries act as alternatives: matching any one suffices.
    assert test_service_mock.call_count == 3
    expected = [
        mock_entities["light.kitchen"],
        mock_entities["light.bedroom"],
        mock_entities["light.bathroom"],
    ]
    actual = [call[0][0] for call in test_service_mock.call_args_list]
    assert all(entity in actual for entity in expected)


async def test_call_with_sync_func(hass, mock_entities):
    """Test invoking sync service calls."""
    test_service_mock = Mock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        test_service_mock,
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}),
    )
    assert test_service_mock.call_count == 1


async def test_call_with_sync_attr(hass, mock_entities):
    """Test invoking sync service calls."""
    mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        "sync_method",
        ha.ServiceCall(
            "test_domain",
            "test_service",
            {"entity_id": "light.kitchen", "area_id": "abcd"},
        ),
    )
    assert mock_method.call_count == 1
    # We pass empty kwargs because both entity_id and area_id are filtered out
    assert mock_method.mock_calls[0][2] == {}


async def test_call_context_user_not_exist(hass):
    """Check we don't allow deleted users to do things."""
    with pytest.raises(exceptions.UnknownUser) as err:
        await service.entity_service_call(
            hass,
            [],
            Mock(),
            ha.ServiceCall(
                "test_domain",
                "test_service",
                context=ha.Context(user_id="non-existing"),
            ),
        )

    assert err.value.context.user_id == "non-existing"


async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities):
    """Check we only target allowed entities if targeting all."""
    with patch(
        "homeassistant.auth.AuthManager.async_get_user",
        return_value=Mock(
            permissions=PolicyPermissions(
                {"entities": {"entity_ids": {"light.kitchen": True}}}, None
            )
        ),
    ):
        await service.entity_service_call(
            hass,
            [Mock(entities=mock_entities)],
            Mock(),
            ha.ServiceCall(
                "test_domain",
                "test_service",
                data={"entity_id": ENTITY_MATCH_ALL},
                context=ha.Context(user_id="mock-id"),
            ),
        )

    # "all" is silently narrowed to the entities the user is permitted to use.
    assert len(mock_handle_entity_call.mock_calls) == 1
    assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen"


async def test_call_context_target_specific(
    hass, mock_handle_entity_call, mock_entities
):
    """Check targeting specific entities."""
    with patch(
        "homeassistant.auth.AuthManager.async_get_user",
        return_value=Mock(
            permissions=PolicyPermissions(
                {"entities": {"entity_ids": {"light.kitchen": True}}}, None
            )
        ),
    ):
        await service.entity_service_call(
            hass,
            [Mock(entities=mock_entities)],
            Mock(),
            ha.ServiceCall(
                "test_domain",
                "test_service",
                {"entity_id": "light.kitchen"},
                context=ha.Context(user_id="mock-id"),
            ),
        )

    assert len(mock_handle_entity_call.mock_calls) == 1
    assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen"


async def test_call_context_target_specific_no_auth(
    hass, mock_handle_entity_call, mock_entities
):
    """Check targeting specific entities without auth."""
    # Explicitly naming an entity the user may not control raises, unlike "all".
    with pytest.raises(exceptions.Unauthorized) as err:
        with patch(
            "homeassistant.auth.AuthManager.async_get_user",
            return_value=Mock(permissions=PolicyPermissions({}, None)),
        ):
            await service.entity_service_call(
                hass,
                [Mock(entities=mock_entities)],
                Mock(),
                ha.ServiceCall(
                    "test_domain",
                    "test_service",
                    {"entity_id": "light.kitchen"},
                    context=ha.Context(user_id="mock-id"),
                ),
            )

    assert err.value.context.user_id == "mock-id"
    assert err.value.entity_id == "light.kitchen"


async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities):
    """Check we target all if no user context given."""
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        Mock(),
        ha.ServiceCall(
            "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}
        ),
    )

    assert len(mock_handle_entity_call.mock_calls) == 4
    assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list(
        mock_entities.values()
    )


async def test_call_no_context_target_specific(
    hass, mock_handle_entity_call, mock_entities
):
    """Check we can target specified entities."""
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        Mock(),
        ha.ServiceCall(
            "test_domain",
            "test_service",
            {"entity_id": ["light.kitchen", "light.non-existing"]},
        ),
    )

    # Unknown entity IDs are dropped rather than raising.
    assert len(mock_handle_entity_call.mock_calls) == 1
    assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen"


async def test_call_with_match_all(
    hass, mock_handle_entity_call, mock_entities, caplog
):
    """Check we only target allowed entities if targeting all."""
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        Mock(),
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
    )

    assert len(mock_handle_entity_call.mock_calls) == 4
    assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list(
        mock_entities.values()
    )


async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities):
    """Check service call if we do not pass an entity ID."""
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        Mock(),
        ha.ServiceCall("test_domain", "test_service"),
    )

    # No entity_id in the call data means nothing is targeted.
    assert len(mock_handle_entity_call.mock_calls) == 0


async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user):
    """Test the register admin service."""
    calls = []

    async def mock_service(call):
        calls.append(call)

    hass.helpers.service.async_register_admin_service("test", "test", mock_service)
    hass.helpers.service.async_register_admin_service(
        "test",
        "test2",
        mock_service,
        vol.Schema({vol.Required("required"): cv.boolean}),
    )

    # Unknown user id -> UnknownUser, service never runs.
    with pytest.raises(exceptions.UnknownUser):
        await hass.services.async_call(
            "test",
            "test",
            {},
            blocking=True,
            context=ha.Context(user_id="non-existing"),
        )
    assert len(calls) == 0

    # Non-admin user -> Unauthorized.
    with pytest.raises(exceptions.Unauthorized):
        await hass.services.async_call(
            "test",
            "test",
            {},
            blocking=True,
            context=ha.Context(user_id=hass_read_only_user.id),
        )
    assert len(calls) == 0

    # Schema-less admin service rejects any extra data.
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            "test",
            "test",
            {"invalid": True},
            blocking=True,
            context=ha.Context(user_id=hass_admin_user.id),
        )
    assert len(calls) == 0

    # Missing required key fails schema validation.
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            "test",
            "test2",
            {},
            blocking=True,
            context=ha.Context(user_id=hass_admin_user.id),
        )
    assert len(calls) == 0

    await hass.services.async_call(
        "test",
        "test2",
        {"required": True},
        blocking=True,
        context=ha.Context(user_id=hass_admin_user.id),
    )
    assert len(calls) == 1
    assert calls[0].context.user_id == hass_admin_user.id


async def test_domain_control_not_async(hass, mock_entities):
    """Test domain verification in a service call with an unknown user."""
    calls = []

    def mock_service_log(call):
        """Define a protected service."""
        calls.append(call)

    # verify_domain_control only accepts coroutine functions.
    with pytest.raises(exceptions.HomeAssistantError):
        hass.helpers.service.verify_domain_control("test_domain")(mock_service_log)


async def test_domain_control_unknown(hass, mock_entities):
    """Test domain verification in a service call with an unknown user."""
    calls = []

    async def mock_service_log(call):
        """Define a protected service."""
        calls.append(call)

    with patch(
        "homeassistant.helpers.entity_registry.async_get_registry",
        return_value=Mock(entities=mock_entities),
    ):
        protected_mock_service = hass.helpers.service.verify_domain_control(
            "test_domain"
        )(mock_service_log)

        hass.services.async_register(
            "test_domain", "test_service", protected_mock_service, schema=None
        )

        with pytest.raises(exceptions.UnknownUser):
            await hass.services.async_call(
                "test_domain",
                "test_service",
                {},
                blocking=True,
                context=ha.Context(user_id="fake_user_id"),
            )
        assert len(calls) == 0


async def test_domain_control_unauthorized(hass, hass_read_only_user):
    """Test domain verification in a service call with an unauthorized user."""
    mock_registry(
        hass,
        {
            "light.kitchen": ent_reg.RegistryEntry(
                entity_id="light.kitchen",
                unique_id="kitchen",
                platform="test_domain",
            )
        },
    )

    calls = []

    async def mock_service_log(call):
        """Define a protected service."""
        calls.append(call)

    protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")(
        mock_service_log
    )

    hass.services.async_register(
        "test_domain", "test_service", protected_mock_service, schema=None
    )

    with pytest.raises(exceptions.Unauthorized):
        await hass.services.async_call(
            "test_domain",
            "test_service",
            {},
            blocking=True,
            context=ha.Context(user_id=hass_read_only_user.id),
        )

    assert len(calls) == 0


async def test_domain_control_admin(hass, hass_admin_user):
    """Test domain verification in a service call with an admin user."""
    mock_registry(
        hass,
        {
            "light.kitchen": ent_reg.RegistryEntry(
                entity_id="light.kitchen",
                unique_id="kitchen",
                platform="test_domain",
            )
        },
    )

    calls = []

    async def mock_service_log(call):
        """Define a protected service."""
        calls.append(call)

    protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")(
        mock_service_log
    )

    hass.services.async_register(
        "test_domain", "test_service", protected_mock_service, schema=None
    )

    await hass.services.async_call(
        "test_domain",
        "test_service",
        {},
        blocking=True,
        context=ha.Context(user_id=hass_admin_user.id),
    )

    assert len(calls) == 1


async def test_domain_control_no_user(hass):
    """Test domain verification in a service call with no user."""
    mock_registry(
        hass,
        {
            "light.kitchen": ent_reg.RegistryEntry(
                entity_id="light.kitchen",
                unique_id="kitchen",
                platform="test_domain",
            )
        },
    )

    calls = []

    async def mock_service_log(call):
        """Define a protected service."""
        calls.append(call)

    protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")(
        mock_service_log
    )

    hass.services.async_register(
        "test_domain", "test_service", protected_mock_service, schema=None
    )

    # Calls without a user context (e.g. automations) are always allowed.
    await hass.services.async_call(
        "test_domain",
        "test_service",
        {},
        blocking=True,
        context=ha.Context(user_id=None),
    )

    assert len(calls) == 1


async def test_extract_from_service_available_device(hass):
    """Test the extraction of entity from service and device is available."""
    entities = [
        MockEntity(name="test_1", entity_id="test_domain.test_1"),
        MockEntity(name="test_2", entity_id="test_domain.test_2", available=False),
        MockEntity(name="test_3", entity_id="test_domain.test_3"),
        MockEntity(name="test_4", entity_id="test_domain.test_4", available=False),
    ]

    call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL})

    # Unavailable entities are excluded from "all".
    assert ["test_domain.test_1", "test_domain.test_3"] == [
        ent.entity_id
        for ent in (await service.async_extract_entities(hass, entities, call_1))
    ]

    call_2 = ha.ServiceCall(
        "test",
        "service",
        data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]},
    )

    # ...and also when named explicitly.
    assert ["test_domain.test_3"] == [
        ent.entity_id
        for ent in (await service.async_extract_entities(hass, entities, call_2))
    ]

    assert (
        await service.async_extract_entities(
            hass,
            entities,
            ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},),
        )
        == []
    )


async def test_extract_from_service_empty_if_no_entity_id(hass):
    """Test the extraction from service without specifying entity."""
    entities = [
        MockEntity(name="test_1", entity_id="test_domain.test_1"),
        MockEntity(name="test_2", entity_id="test_domain.test_2"),
    ]
    call = ha.ServiceCall("test", "service")

    assert [] == [
        ent.entity_id
        for ent in (await service.async_extract_entities(hass, entities, call))
    ]


async def test_extract_from_service_filter_out_non_existing_entities(hass):
    """Test the extraction of non existing entities from service."""
    entities = [
        MockEntity(name="test_1", entity_id="test_domain.test_1"),
        MockEntity(name="test_2", entity_id="test_domain.test_2"),
    ]

    call = ha.ServiceCall(
        "test",
        "service",
        {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]},
    )

    assert ["test_domain.test_2"] == [
        ent.entity_id
        for ent in (await service.async_extract_entities(hass, entities, call))
    ]


async def test_extract_from_service_area_id(hass, area_mock):
    """Test the extraction using area ID as reference."""
    entities = [
        MockEntity(name="in_area", entity_id="light.in_area"),
        MockEntity(name="no_area", entity_id="light.no_area"),
        MockEntity(name="diff_area", entity_id="light.diff_area"),
    ]

    call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"})
    extracted = await service.async_extract_entities(hass, entities, call)
    assert len(extracted) == 1
    assert extracted[0].entity_id == "light.in_area"

    # area_id also accepts a list of areas.
    call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]})
    extracted = await service.async_extract_entities(hass, entities, call)
    assert len(extracted) == 2
    assert sorted(ent.entity_id for ent in extracted) == [
        "light.diff_area",
        "light.in_area",
    ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/blebox/__init__.py
"""Support for Z-Wave.""" # pylint: disable=import-outside-toplevel import asyncio import copy from importlib import import_module import logging from pprint import pprint import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import CoreState, callback from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_device_registry, ) from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_registry import ( async_get_registry as async_get_entity_registry, ) from homeassistant.helpers.entity_values import EntityValues from homeassistant.helpers.event import async_track_time_change from homeassistant.util import convert import homeassistant.util.dt as dt_util from . import config_flow # noqa: F401 pylint: disable=unused-import from . 
import const, websocket_api as wsapi, workaround from .const import ( CONF_AUTOHEAL, CONF_CONFIG_PATH, CONF_DEBUG, CONF_NETWORK_KEY, CONF_POLLING_INTERVAL, CONF_USB_STICK_PATH, DATA_DEVICES, DATA_ENTITY_VALUES, DATA_NETWORK, DATA_ZWAVE_CONFIG, DEFAULT_CONF_AUTOHEAL, DEFAULT_CONF_USB_STICK_PATH, DEFAULT_DEBUG, DEFAULT_POLLING_INTERVAL, DOMAIN, ) from .discovery_schemas import DISCOVERY_SCHEMAS from .node_entity import ZWaveBaseEntity, ZWaveNodeEntity from .util import ( check_has_unique_id, check_node_schema, check_value_schema, is_node_parsed, node_device_id_and_name, node_name, ) _LOGGER = logging.getLogger(__name__) CLASS_ID = "class_id" ATTR_POWER = "power_consumption" CONF_POLLING_INTENSITY = "polling_intensity" CONF_IGNORED = "ignored" CONF_INVERT_OPENCLOSE_BUTTONS = "invert_openclose_buttons" CONF_INVERT_PERCENT = "invert_percent" CONF_REFRESH_VALUE = "refresh_value" CONF_REFRESH_DELAY = "delay" CONF_DEVICE_CONFIG = "device_config" CONF_DEVICE_CONFIG_GLOB = "device_config_glob" CONF_DEVICE_CONFIG_DOMAIN = "device_config_domain" DEFAULT_CONF_IGNORED = False DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS = False DEFAULT_CONF_INVERT_PERCENT = False DEFAULT_CONF_REFRESH_VALUE = False DEFAULT_CONF_REFRESH_DELAY = 5 SUPPORTED_PLATFORMS = [ "binary_sensor", "climate", "cover", "fan", "lock", "light", "sensor", "switch", ] RENAME_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) RENAME_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) SET_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), 
cv.string), vol.Optional(const.ATTR_CONFIG_SIZE, default=2): vol.Coerce(int), } ) SET_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Any(vol.Coerce(int), cv.string), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), cv.string), } ) REFRESH_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), } ) SET_POLL_INTENSITY_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_POLL_INTENSITY): vol.Coerce(int), } ) PRINT_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), } ) NODE_SERVICE_SCHEMA = vol.Schema({vol.Required(const.ATTR_NODE_ID): vol.Coerce(int)}) REFRESH_ENTITY_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) RESET_NODE_METERS_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=1): vol.Coerce(int), } ) CHANGE_ASSOCIATION_SCHEMA = vol.Schema( { vol.Required(const.ATTR_ASSOCIATION): cv.string, vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_GROUP): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int), } ) SET_WAKEUP_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.All( vol.Coerce(int), cv.positive_int ), } ) HEAL_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_RETURN_ROUTES, default=False): cv.boolean, } ) TEST_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_MESSAGES, default=1): cv.positive_int, } ) DEVICE_CONFIG_SCHEMA_ENTRY = 
vol.Schema( { vol.Optional(CONF_POLLING_INTENSITY): cv.positive_int, vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean, vol.Optional( CONF_INVERT_OPENCLOSE_BUTTONS, default=DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS ): cv.boolean, vol.Optional( CONF_INVERT_PERCENT, default=DEFAULT_CONF_INVERT_PERCENT ): cv.boolean, vol.Optional( CONF_REFRESH_VALUE, default=DEFAULT_CONF_REFRESH_VALUE ): cv.boolean, vol.Optional( CONF_REFRESH_DELAY, default=DEFAULT_CONF_REFRESH_DELAY ): cv.positive_int, } ) SIGNAL_REFRESH_ENTITY_FORMAT = "zwave_refresh_entity_{}" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_AUTOHEAL, default=DEFAULT_CONF_AUTOHEAL): cv.boolean, vol.Optional(CONF_CONFIG_PATH): cv.string, vol.Optional(CONF_NETWORK_KEY): vol.All( cv.string, vol.Match(r"(0x\w\w,\s?){15}0x\w\w") ), vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema( {cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean, vol.Optional( CONF_POLLING_INTERVAL, default=DEFAULT_POLLING_INTERVAL ): cv.positive_int, vol.Optional(CONF_USB_STICK_PATH): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) def _obj_to_dict(obj): """Convert an object into a hash for debug.""" return { key: getattr(obj, key) for key in dir(obj) if key[0] != "_" and not callable(getattr(obj, key)) } def _value_name(value): """Return the name of the value.""" return f"{node_name(value.node)} {value.label}".strip() def nice_print_node(node): """Print a nice formatted node to the output (debug method).""" node_dict = _obj_to_dict(node) node_dict["values"] = { value_id: _obj_to_dict(value) for value_id, value in node.values.items() } _LOGGER.info("FOUND NODE %s \n%s", node.product_name, node_dict) def get_config_value(node, value_index, tries=5): 
"""Return the current configuration value for a specific index.""" try: for value in node.values.values(): if ( value.command_class == const.COMMAND_CLASS_CONFIGURATION and value.index == value_index ): return value.data except RuntimeError: # If we get a runtime error the dict has changed while # we was looking for a value, just do it again return ( None if tries <= 0 else get_config_value(node, value_index, tries=tries - 1) ) return None async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Z-Wave platform (generic part).""" if discovery_info is None or DATA_NETWORK not in hass.data: return False device = hass.data[DATA_DEVICES].get(discovery_info[const.DISCOVERY_DEVICE]) if device is None: return False async_add_entities([device]) return True async def async_setup(hass, config): """Set up Z-Wave components.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_ZWAVE_CONFIG] = conf if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={ CONF_USB_STICK_PATH: conf.get( CONF_USB_STICK_PATH, DEFAULT_CONF_USB_STICK_PATH ), CONF_NETWORK_KEY: conf.get(CONF_NETWORK_KEY), }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up Z-Wave from a config entry. Will automatically load components to support devices found on the network. 
""" from pydispatch import dispatcher # pylint: disable=import-error from openzwave.option import ZWaveOption from openzwave.network import ZWaveNetwork from openzwave.group import ZWaveGroup # Merge config entry and yaml config config = config_entry.data if DATA_ZWAVE_CONFIG in hass.data: config = {**config, **hass.data[DATA_ZWAVE_CONFIG]} # Update hass.data with merged config so we can access it elsewhere hass.data[DATA_ZWAVE_CONFIG] = config # Load configuration use_debug = config.get(CONF_DEBUG, DEFAULT_DEBUG) autoheal = config.get(CONF_AUTOHEAL, DEFAULT_CONF_AUTOHEAL) device_config = EntityValues( config.get(CONF_DEVICE_CONFIG), config.get(CONF_DEVICE_CONFIG_DOMAIN), config.get(CONF_DEVICE_CONFIG_GLOB), ) usb_path = config[CONF_USB_STICK_PATH] _LOGGER.info("Z-Wave USB path is %s", usb_path) # Setup options options = ZWaveOption( usb_path, user_path=hass.config.config_dir, config_path=config.get(CONF_CONFIG_PATH), ) options.set_console_output(use_debug) if config.get(CONF_NETWORK_KEY): options.addOption("NetworkKey", config[CONF_NETWORK_KEY]) await hass.async_add_executor_job(options.lock) network = hass.data[DATA_NETWORK] = ZWaveNetwork(options, autostart=False) hass.data[DATA_DEVICES] = {} hass.data[DATA_ENTITY_VALUES] = [] registry = await async_get_entity_registry(hass) wsapi.async_load_websocket_api(hass) if use_debug: # pragma: no cover def log_all(signal, value=None): """Log all the signals.""" print("") print("SIGNAL *****", signal) if value and signal in ( ZWaveNetwork.SIGNAL_VALUE_CHANGED, ZWaveNetwork.SIGNAL_VALUE_ADDED, ZWaveNetwork.SIGNAL_SCENE_EVENT, ZWaveNetwork.SIGNAL_NODE_EVENT, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, ): pprint(_obj_to_dict(value)) print("") dispatcher.connect(log_all, weak=False) def value_added(node, value): """Handle new added value to a node on the network.""" # Check if this value should be tracked by an existing entity for values in 
hass.data[DATA_ENTITY_VALUES]: values.check_value(value) for schema in DISCOVERY_SCHEMAS: if not check_node_schema(node, schema): continue if not check_value_schema( value, schema[const.DISC_VALUES][const.DISC_PRIMARY] ): continue values = ZWaveDeviceEntityValues( hass, schema, value, config, device_config, registry ) # We create a new list and update the reference here so that # the list can be safely iterated over in the main thread new_values = hass.data[DATA_ENTITY_VALUES] + [values] hass.data[DATA_ENTITY_VALUES] = new_values platform = EntityPlatform( hass=hass, logger=_LOGGER, domain=DOMAIN, platform_name=DOMAIN, platform=None, scan_interval=DEFAULT_SCAN_INTERVAL, entity_namespace=None, ) platform.config_entry = config_entry def node_added(node): """Handle a new node on the network.""" entity = ZWaveNodeEntity(node, network) async def _add_node_to_component(): if hass.data[DATA_DEVICES].get(entity.unique_id): return name = node_name(node) generated_id = generate_entity_id(DOMAIN + ".{}", name, []) node_config = device_config.get(generated_id) if node_config.get(CONF_IGNORED): _LOGGER.info( "Ignoring node entity %s due to device settings", generated_id ) return hass.data[DATA_DEVICES][entity.unique_id] = entity await platform.async_add_entities([entity]) if entity.unique_id: hass.async_add_job(_add_node_to_component()) return @callback def _on_ready(sec): _LOGGER.info("Z-Wave node %d ready after %d seconds", entity.node_id, sec) hass.async_add_job(_add_node_to_component) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave node %d not ready after %d seconds, continuing anyway", entity.node_id, sec, ) hass.async_add_job(_add_node_to_component) hass.add_job(check_has_unique_id, entity, _on_ready, _on_timeout) def node_removed(node): node_id = node.node_id node_key = f"node-{node_id}" for key in list(hass.data[DATA_DEVICES]): if key is None: continue if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] _LOGGER.debug( "Removing 
Entity - value: %s - entity_id: %s", key, entity.entity_id ) hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][key] entity = hass.data[DATA_DEVICES][node_key] hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][node_key] hass.add_job(_remove_device(node)) async def _remove_device(node): dev_reg = await async_get_device_registry(hass) identifier, name = node_device_id_and_name(node) device = dev_reg.async_get_device(identifiers={identifier}, connections=set()) if device is not None: _LOGGER.debug("Removing Device - %s - %s", device.id, name) dev_reg.async_remove_device(device.id) def network_ready(): """Handle the query of all awake nodes.""" _LOGGER.info( "Z-Wave network is ready for use. All awake nodes " "have been queried. Sleeping nodes will be " "queried when they awake" ) hass.bus.fire(const.EVENT_NETWORK_READY) def network_complete(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. All nodes on the network have been queried" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE) def network_complete_some_dead(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. 
All nodes on the network " "have been queried, but some nodes are marked dead" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE_SOME_DEAD) dispatcher.connect(value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED, weak=False) dispatcher.connect(node_added, ZWaveNetwork.SIGNAL_NODE_ADDED, weak=False) dispatcher.connect(node_removed, ZWaveNetwork.SIGNAL_NODE_REMOVED, weak=False) dispatcher.connect( network_ready, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete_some_dead, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, weak=False, ) def add_node(service): """Switch into inclusion mode.""" _LOGGER.info("Z-Wave add_node have been initialized") network.controller.add_node() def add_node_secure(service): """Switch into secure inclusion mode.""" _LOGGER.info("Z-Wave add_node_secure have been initialized") network.controller.add_node(True) def remove_node(service): """Switch into exclusion mode.""" _LOGGER.info("Z-Wave remove_node have been initialized") network.controller.remove_node() def cancel_command(service): """Cancel a running controller command.""" _LOGGER.info("Cancel running Z-Wave command") network.controller.cancel_command() def heal_network(service): """Heal the network.""" _LOGGER.info("Z-Wave heal running") network.heal() def soft_reset(service): """Soft reset the controller.""" _LOGGER.info("Z-Wave soft_reset have been initialized") network.controller.soft_reset() def test_network(service): """Test the network by sending commands to all the nodes.""" _LOGGER.info("Z-Wave test_network have been initialized") network.test() def stop_network(_service_or_event): """Stop Z-Wave network.""" _LOGGER.info("Stopping Z-Wave network") network.stop() if hass.state == CoreState.running: hass.bus.fire(const.EVENT_NETWORK_STOP) async def rename_node(service): """Rename a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = 
network.nodes[node_id] name = service.data.get(const.ATTR_NAME) node.name = name _LOGGER.info("Renamed Z-Wave node %d to %s", node_id, name) update_ids = service.data.get(const.ATTR_UPDATE_IDS) # We want to rename the device, the node entity, # and all the contained entities node_key = f"node-{node_id}" entity = hass.data[DATA_DEVICES][node_key] await entity.node_renamed(update_ids) for key in list(hass.data[DATA_DEVICES]): if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] await entity.value_renamed(update_ids) async def rename_value(service): """Rename a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] name = service.data.get(const.ATTR_NAME) value.label = name _LOGGER.info( "Renamed Z-Wave value (Node %d Value %d) to %s", node_id, value_id, name ) update_ids = service.data.get(const.ATTR_UPDATE_IDS) value_key = f"{node_id}-{value_id}" entity = hass.data[DATA_DEVICES][value_key] await entity.value_renamed(update_ids) def set_poll_intensity(service): """Set the polling intensity of a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] intensity = service.data.get(const.ATTR_POLL_INTENSITY) if intensity == 0: if value.disable_poll(): _LOGGER.info("Polling disabled (Node %d Value %d)", node_id, value_id) return _LOGGER.info( "Polling disabled failed (Node %d Value %d)", node_id, value_id ) else: if value.enable_poll(intensity): _LOGGER.info( "Set polling intensity (Node %d Value %d) to %s", node_id, value_id, intensity, ) return _LOGGER.info( "Set polling intensity failed (Node %d Value %d)", node_id, value_id ) def remove_failed_node(service): """Remove failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to remove zwave node %d", node_id) 
network.controller.remove_failed_node(node_id) def replace_failed_node(service): """Replace failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to replace zwave node %d", node_id) network.controller.replace_failed_node(node_id) def set_config_parameter(service): """Set a config parameter to a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) selection = service.data.get(const.ATTR_CONFIG_VALUE) size = service.data.get(const.ATTR_CONFIG_SIZE) for value in node.get_values( class_id=const.COMMAND_CLASS_CONFIGURATION ).values(): if value.index != param: continue if value.type == const.TYPE_BOOL: value.data = int(selection == "True") _LOGGER.info( "Setting configuration parameter %s on Node %s with bool selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_LIST: value.data = str(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with list selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_BUTTON: network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Setting configuration parameter %s on Node %s " "with button selection %s", param, node_id, selection, ) return value.data = int(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) return node.set_config_param(param, selection, size) _LOGGER.info( "Setting unknown configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) def refresh_node_value(service): """Refresh the specified value from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] node.values[value_id].refresh() _LOGGER.info("Node %s value %s refreshed", node_id, value_id) def set_node_value(service): """Set the specified value on a 
node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) value = service.data.get(const.ATTR_CONFIG_VALUE) node = network.nodes[node_id] node.values[value_id].data = value _LOGGER.info("Node %s value %s set to %s", node_id, value_id, value) def print_config_parameter(service): """Print a config parameter from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) _LOGGER.info( "Config parameter %s on Node %s: %s", param, node_id, get_config_value(node, param), ) def print_node(service): """Print all information about z-wave node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] nice_print_node(node) def set_wakeup(service): """Set wake-up interval of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] value = service.data.get(const.ATTR_CONFIG_VALUE) if node.can_wake_up(): for value_id in node.get_values(class_id=const.COMMAND_CLASS_WAKE_UP): node.values[value_id].data = value _LOGGER.info("Node %s wake-up set to %d", node_id, value) else: _LOGGER.info("Node %s is not wakeable", node_id) def change_association(service): """Change an association in the zwave network.""" association_type = service.data.get(const.ATTR_ASSOCIATION) node_id = service.data.get(const.ATTR_NODE_ID) target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID) group = service.data.get(const.ATTR_GROUP) instance = service.data.get(const.ATTR_INSTANCE) node = ZWaveGroup(group, network, node_id) if association_type == "add": node.add_association(target_node_id, instance) _LOGGER.info( "Adding association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) if association_type == "remove": node.remove_association(target_node_id, instance) _LOGGER.info( "Removing association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, 
                target_node_id,
                instance,
            )

    async def async_refresh_entity(service):
        """Refresh values that specific entity depends on.

        Dispatches a per-entity refresh signal; the entity listening on
        SIGNAL_REFRESH_ENTITY_FORMAT performs the actual network refresh.
        """
        entity_id = service.data.get(ATTR_ENTITY_ID)
        async_dispatcher_send(hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(entity_id))

    def refresh_node(service):
        """Refresh all node info."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        node = network.nodes[node_id]
        node.refresh_info()

    def reset_node_meters(service):
        """Reset meter counters of a node.

        Looks for the meter-reset value on the requested instance and
        presses/releases its virtual button to trigger the reset.
        """
        node_id = service.data.get(const.ATTR_NODE_ID)
        instance = service.data.get(const.ATTR_INSTANCE)
        node = network.nodes[node_id]
        for value in node.get_values(class_id=const.COMMAND_CLASS_METER).values():
            # Only the value at INDEX_METER_RESET on the matching instance
            # acts as the reset button.
            if value.index != const.INDEX_METER_RESET:
                continue
            if value.instance != instance:
                continue
            network.manager.pressButton(value.value_id)
            network.manager.releaseButton(value.value_id)
            _LOGGER.info(
                "Resetting meters on node %s instance %s....", node_id, instance
            )
            return
        # Fell through the loop: node has no resettable meter on this instance.
        _LOGGER.info(
            "Node %s on instance %s does not have resettable meters", node_id, instance
        )

    def heal_node(service):
        """Heal a node on the network."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        update_return_routes = service.data.get(const.ATTR_RETURN_ROUTES)
        node = network.nodes[node_id]
        _LOGGER.info("Z-Wave node heal running for node %s", node_id)
        node.heal(update_return_routes)

    def test_node(service):
        """Send test messages to a node on the network."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        messages = service.data.get(const.ATTR_MESSAGES)
        node = network.nodes[node_id]
        _LOGGER.info("Sending %s test-messages to node %s", messages, node_id)
        node.test(messages)

    def start_zwave(_service_or_event):
        """Startup Z-Wave network.

        Fires EVENT_NETWORK_START and then waits (in a background job) for
        the network to reach the awaked state before finalizing setup.
        """
        _LOGGER.info("Starting Z-Wave network...")
        network.start()
        hass.bus.fire(const.EVENT_NETWORK_START)

        async def _check_awaked():
            """Wait for Z-wave awaked state (or timeout) and finalize start."""
            _LOGGER.debug("network state: %d %s", network.state, network.state_str)
            start_time = dt_util.utcnow()
            while True:
                waited
= int((dt_util.utcnow() - start_time).total_seconds()) if network.state >= network.STATE_AWAKED: # Need to be in STATE_AWAKED before talking to nodes. _LOGGER.info("Z-Wave ready after %d seconds", waited) break if waited >= const.NETWORK_READY_WAIT_SECS: # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave # network to be ready. _LOGGER.warning( "Z-Wave not ready after %d seconds, continuing anyway", waited ) _LOGGER.info( "final network state: %d %s", network.state, network.state_str ) break await asyncio.sleep(1) hass.async_add_job(_finalize_start) hass.add_job(_check_awaked) def _finalize_start(): """Perform final initializations after Z-Wave network is awaked.""" polling_interval = convert(config.get(CONF_POLLING_INTERVAL), int) if polling_interval is not None: network.set_poll_interval(polling_interval, False) poll_interval = network.get_poll_interval() _LOGGER.info("Z-Wave polling interval set to %d ms", poll_interval) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network) # Register node services for Z-Wave network hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node) hass.services.register(DOMAIN, const.SERVICE_ADD_NODE_SECURE, add_node_secure) hass.services.register(DOMAIN, const.SERVICE_REMOVE_NODE, remove_node) hass.services.register(DOMAIN, const.SERVICE_CANCEL_COMMAND, cancel_command) hass.services.register(DOMAIN, const.SERVICE_HEAL_NETWORK, heal_network) hass.services.register(DOMAIN, const.SERVICE_SOFT_RESET, soft_reset) hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK, test_network) hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK, stop_network) hass.services.register( DOMAIN, const.SERVICE_RENAME_NODE, rename_node, schema=RENAME_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RENAME_VALUE, rename_value, schema=RENAME_VALUE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER, set_config_parameter, schema=SET_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, 
const.SERVICE_SET_NODE_VALUE, set_node_value, schema=SET_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE_VALUE, refresh_node_value, schema=REFRESH_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_PRINT_CONFIG_PARAMETER, print_config_parameter, schema=PRINT_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REMOVE_FAILED_NODE, remove_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REPLACE_FAILED_NODE, replace_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_CHANGE_ASSOCIATION, change_association, schema=CHANGE_ASSOCIATION_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_WAKEUP, set_wakeup, schema=SET_WAKEUP_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_PRINT_NODE, print_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_ENTITY, async_refresh_entity, schema=REFRESH_ENTITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE, refresh_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RESET_NODE_METERS, reset_node_meters, schema=RESET_NODE_METERS_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_POLL_INTENSITY, set_poll_intensity, schema=SET_POLL_INTENSITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_HEAL_NODE, heal_node, schema=HEAL_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_TEST_NODE, test_node, schema=TEST_NODE_SCHEMA ) # Setup autoheal if autoheal: _LOGGER.info("Z-Wave network autoheal is enabled") async_track_time_change(hass, heal_network, hour=0, minute=0, second=0) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_zwave) hass.services.async_register(DOMAIN, const.SERVICE_START_NETWORK, start_zwave) for entry_component in SUPPORTED_PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, entry_component) ) return True class 
ZWaveDeviceEntityValues: """Manages entity access to the underlying zwave value objects.""" def __init__( self, hass, schema, primary_value, zwave_config, device_config, registry ): """Initialize the values object with the passed entity schema.""" self._hass = hass self._zwave_config = zwave_config self._device_config = device_config self._schema = copy.deepcopy(schema) self._values = {} self._entity = None self._workaround_ignore = False self._registry = registry for name in self._schema[const.DISC_VALUES].keys(): self._values[name] = None self._schema[const.DISC_VALUES][name][const.DISC_INSTANCE] = [ primary_value.instance ] self._values[const.DISC_PRIMARY] = primary_value self._node = primary_value.node self._schema[const.DISC_NODE_ID] = [self._node.node_id] # Check values that have already been discovered for node for value in self._node.values.values(): self.check_value(value) self._check_entity_ready() def __getattr__(self, name): """Get the specified value for this entity.""" return self._values[name] def __iter__(self): """Allow iteration over all values.""" return iter(self._values.values()) def check_value(self, value): """Check if the new value matches a missing value for this entity. If a match is found, it is added to the values mapping. 
        """
        if not check_node_schema(value.node, self._schema):
            return
        for name in self._values:
            # Only fill slots that have not been matched yet.
            if self._values[name] is not None:
                continue
            if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):
                continue
            self._values[name] = value
            if self._entity:
                self._entity.value_added()
                self._entity.value_changed()
            self._check_entity_ready()

    def _check_entity_ready(self):
        """Check if all required values are discovered and create entity."""
        if self._workaround_ignore:
            # A workaround or device config already marked this value set
            # as not entity-worthy.
            return
        if self._entity is not None:
            # Entity has already been created for this value set.
            return
        for name in self._schema[const.DISC_VALUES]:
            # Every non-optional value must be discovered before the entity
            # may be created.
            if self._values[name] is None and not self._schema[const.DISC_VALUES][
                name
            ].get(const.DISC_OPTIONAL):
                return
        component = self._schema[const.DISC_COMPONENT]
        workaround_component = workaround.get_device_component_mapping(self.primary)
        if workaround_component and workaround_component != component:
            if workaround_component == workaround.WORKAROUND_IGNORE:
                _LOGGER.info(
                    "Ignoring Node %d Value %d due to workaround",
                    self.primary.node.node_id,
                    self.primary.value_id,
                )
                # No entity will be created for this value
                self._workaround_ignore = True
                return
            _LOGGER.debug("Using %s instead of %s", workaround_component, component)
            component = workaround_component
        # Prefer an entity_id already known to the entity registry so the
        # entity keeps its identity; otherwise generate a fresh one.
        entity_id = self._registry.async_get_entity_id(
            component, DOMAIN, compute_value_unique_id(self._node, self.primary)
        )
        if entity_id is None:
            value_name = _value_name(self.primary)
            entity_id = generate_entity_id(component + ".{}", value_name, [])
        node_config = self._device_config.get(entity_id)
        # Configure node
        _LOGGER.debug(
            "Adding Node_id=%s Generic_command_class=%s, "
            "Specific_command_class=%s, "
            "Command_class=%s, Value type=%s, "
            "Genre=%s as %s",
            self._node.node_id,
            self._node.generic,
            self._node.specific,
            self.primary.command_class,
            self.primary.type,
            self.primary.genre,
            component,
        )
        if node_config.get(CONF_IGNORED):
            _LOGGER.info("Ignoring entity %s due to device settings", entity_id)
            # No entity will be created for this value
            self._workaround_ignore = True
            return
polling_intensity = convert(node_config.get(CONF_POLLING_INTENSITY), int) if polling_intensity: self.primary.enable_poll(polling_intensity) platform = import_module(f".{component}", __name__) device = platform.get_device( node=self._node, values=self, node_config=node_config, hass=self._hass ) if device is None: # No entity will be created for this value self._workaround_ignore = True return self._entity = device @callback def _on_ready(sec): _LOGGER.info( "Z-Wave entity %s (node_id: %d) ready after %d seconds", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave entity %s (node_id: %d) not ready after %d seconds, " "continuing anyway", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) async def discover_device(component, device): """Put device in a dictionary and call discovery on it.""" if self._hass.data[DATA_DEVICES].get(device.unique_id): return self._hass.data[DATA_DEVICES][device.unique_id] = device if component in SUPPORTED_PLATFORMS: async_dispatcher_send(self._hass, f"zwave_new_{component}", device) else: await discovery.async_load_platform( self._hass, component, DOMAIN, {const.DISCOVERY_DEVICE: device.unique_id}, self._zwave_config, ) if device.unique_id: self._hass.add_job(discover_device, component, device) else: self._hass.add_job(check_has_unique_id, device, _on_ready, _on_timeout) class ZWaveDeviceEntity(ZWaveBaseEntity): """Representation of a Z-Wave node entity.""" def __init__(self, values, domain): """Initialize the z-Wave device.""" # pylint: disable=import-error super().__init__() from openzwave.network import ZWaveNetwork from pydispatch import dispatcher self.values = values self.node = values.primary.node self.values.primary.set_change_verified(False) self._name = _value_name(self.values.primary) self._unique_id = self._compute_unique_id() self._update_attributes() dispatcher.connect( 
            self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED
        )

    def network_value_changed(self, value):
        """Handle a value change on the network."""
        # Only react when the changed value belongs to this entity's value set.
        if value.value_id in [v.value_id for v in self.values if v]:
            return self.value_changed()

    def value_added(self):
        """Handle a new value of this entity."""

    def value_changed(self):
        """Handle a changed value for this entity's node."""
        self._update_attributes()
        self.update_properties()
        self.maybe_schedule_update()

    async def value_renamed(self, update_ids=False):
        """Rename the node and update any IDs."""
        self._name = _value_name(self.values.primary)
        if update_ids:
            # Update entity ID.
            ent_reg = await async_get_entity_registry(self.hass)
            new_entity_id = ent_reg.async_generate_entity_id(
                self.platform.domain,
                self._name,
                self.platform.entities.keys() - {self.entity_id},
            )
            if new_entity_id != self.entity_id:
                # Don't change the name attribute, it will be None unless
                # customised and if it's been customised, keep the
                # customisation.
                ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id)
                return
        # else for the above two ifs, update if not using update_entity
        self.async_write_ha_state()

    async def async_added_to_hass(self):
        """Add device to dict."""
        # Listen for per-entity refresh requests dispatched by the
        # refresh_entity service.
        async_dispatcher_connect(
            self.hass,
            SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id),
            self.refresh_from_network,
        )

    def _update_attributes(self):
        """Update the node attributes.
May only be used inside callback.""" self.node_id = self.node.node_id self._name = _value_name(self.values.primary) if not self._unique_id: self._unique_id = self._compute_unique_id() if self._unique_id: self.try_remove_and_add() if self.values.power: self.power_consumption = round( self.values.power.data, self.values.power.precision ) else: self.power_consumption = None def update_properties(self): """Update on data changes for node values.""" @property def should_poll(self): """No polling needed.""" return False @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_info(self): """Return device information.""" identifier, name = node_device_id_and_name( self.node, self.values.primary.instance ) info = { "name": name, "identifiers": {identifier}, "manufacturer": self.node.manufacturer_name, "model": self.node.product_name, } if self.values.primary.instance > 1: info["via_device"] = (DOMAIN, self.node_id) elif self.node_id > 1: info["via_device"] = (DOMAIN, 1) return info @property def name(self): """Return the name of the device.""" return self._name @property def device_state_attributes(self): """Return the device specific state attributes.""" attrs = { const.ATTR_NODE_ID: self.node_id, const.ATTR_VALUE_INDEX: self.values.primary.index, const.ATTR_VALUE_INSTANCE: self.values.primary.instance, const.ATTR_VALUE_ID: str(self.values.primary.value_id), } if self.power_consumption is not None: attrs[ATTR_POWER] = self.power_consumption return attrs def refresh_from_network(self): """Refresh all dependent values from zwave network.""" for value in self.values: if value is not None: self.node.refresh_value(value.value_id) def _compute_unique_id(self): if ( is_node_parsed(self.node) and self.values.primary.label != "Unknown" ) or self.node.is_ready: return compute_value_unique_id(self.node, self.values.primary) return None def compute_value_unique_id(node, value): """Compute unique_id a value would get if it were to get 
one.""" return f"{node.node_id}-{value.object_id}"
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list]
    assert all(entity in actual for entity in expected)


async def test_call_with_sync_func(hass, mock_entities):
    """Test invoking sync service calls."""
    test_service_mock = Mock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        test_service_mock,
        ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}),
    )
    assert test_service_mock.call_count == 1


async def test_call_with_sync_attr(hass, mock_entities):
    """Test invoking sync service calls."""
    mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None)
    await service.entity_service_call(
        hass,
        [Mock(entities=mock_entities)],
        "sync_method",
        ha.ServiceCall(
            "test_domain",
            "test_service",
            {"entity_id": "light.kitchen", "area_id": "abcd"},
        ),
    )
    assert mock_method.call_count == 1
    # We pass empty kwargs because both entity_id and area_id are filtered out
    assert mock_method.mock_calls[0][2] == {}


async def test_call_context_user_not_exist(hass):
    """Check we don't allow deleted users to do things."""
    with pytest.raises(exceptions.UnknownUser) as err:
        await service.entity_service_call(
            hass,
            [],
            Mock(),
            ha.ServiceCall(
                "test_domain",
                "test_service",
                context=ha.Context(user_id="non-existing"),
            ),
        )
    assert err.value.context.user_id == "non-existing"


async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities):
    """Check we only target allowed entities if targeting all."""
    # User only has permission for light.kitchen, so "all" must narrow to it.
    with patch(
        "homeassistant.auth.AuthManager.async_get_user",
        return_value=Mock(
            permissions=PolicyPermissions(
                {"entities": {"entity_ids": {"light.kitchen": True}}}, None
            )
        ),
    ):
        await service.entity_service_call(
            hass,
            [Mock(entities=mock_entities)],
            Mock(),
            ha.ServiceCall(
                "test_domain",
                "test_service",
                data={"entity_id": ENTITY_MATCH_ALL},
                context=ha.Context(user_id="mock-id"),
            ),
        )
    assert len(mock_handle_entity_call.mock_calls) == 1
    assert mock_handle_entity_call.mock_calls[0][1][1].entity_id ==
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/zwave/__init__.py
"""The template component.""" from itertools import chain import logging from homeassistant.const import MATCH_ALL _LOGGER = logging.getLogger(__name__) def initialise_templates(hass, templates, attribute_templates=None): """Initialise templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} for template in chain(templates.values(), attribute_templates.values()): if template is None: continue template.hass = hass def extract_entities( device_name, device_type, manual_entity_ids, templates, attribute_templates=None ): """Extract entity ids from templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} entity_ids = set() if manual_entity_ids is None: invalid_templates = [] for template_name, template in chain( templates.items(), attribute_templates.items() ): if template is None: continue template_entity_ids = template.extract_entities() if template_entity_ids != MATCH_ALL: entity_ids |= set(template_entity_ids) else: invalid_templates.append(template_name.replace("_template", "")) entity_ids = list(entity_ids) if invalid_templates: if not entity_ids: entity_ids = MATCH_ALL _LOGGER.warning( "Template %s '%s' has no entity ids configured to track nor" " were we able to extract the entities to track from the %s " "template(s). This entity will only be able to be updated " "manually", device_type, device_name, ", ".join(invalid_templates), ) else: entity_ids = manual_entity_ids return entity_ids
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/template/__init__.py
"""Support for Niko Home Control.""" from datetime import timedelta import logging import nikohomecontrol import voluptuous as vol # Import the device class from the component that you want to support from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Niko Home Control light platform.""" host = config[CONF_HOST] try: nhc = nikohomecontrol.NikoHomeControl( {"ip": host, "port": 8000, "timeout": 20000} ) niko_data = NikoHomeControlData(hass, nhc) await niko_data.async_update() except OSError as err: _LOGGER.error("Unable to access %s (%s)", host, err) raise PlatformNotReady async_add_entities( [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True ) class NikoHomeControlLight(LightEntity): """Representation of an Niko Light.""" def __init__(self, light, data): """Set up the Niko Home Control light platform.""" self._data = data self._light = light self._unique_id = f"light-{light.id}" self._name = light.name self._state = light.is_on self._brightness = None @property def unique_id(self): """Return unique ID for light.""" return self._unique_id @property def name(self): """Return the display name of this light.""" return self._name @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def is_on(self): """Return true if light is on.""" return self._state def turn_on(self, **kwargs): """Instruct the light to turn on.""" self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 
255) _LOGGER.debug("Turn on: %s", self.name) self._light.turn_on() def turn_off(self, **kwargs): """Instruct the light to turn off.""" _LOGGER.debug("Turn off: %s", self.name) self._light.turn_off() async def async_update(self): """Get the latest data from NikoHomeControl API.""" await self._data.async_update() self._state = self._data.get_state(self._light.id) class NikoHomeControlData: """The class for handling data retrieval.""" def __init__(self, hass, nhc): """Set up Niko Home Control Data object.""" self._nhc = nhc self.hass = hass self.available = True self.data = {} self._system_info = None @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the NikoHomeControl API.""" _LOGGER.debug("Fetching async state in bulk") try: self.data = await self.hass.async_add_executor_job( self._nhc.list_actions_raw ) self.available = True except OSError as ex: _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) self.available = False def get_state(self, aid): """Find and filter state based on action id.""" for state in self.data: if state["id"] == aid: return state["value1"] != 0 _LOGGER.error("Failed to retrieve state off unknown light")
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/niko_home_control/light.py
"""Component that will help set the Microsoft face for verify processing."""
import logging

import voluptuous as vol

from homeassistant.components.image_processing import (
    ATTR_CONFIDENCE,
    CONF_CONFIDENCE,
    CONF_ENTITY_ID,
    CONF_NAME,
    CONF_SOURCE,
    PLATFORM_SCHEMA,
    ImageProcessingFaceEntity,
)
from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE
from homeassistant.const import ATTR_NAME
from homeassistant.core import split_entity_id
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv

_LOGGER = logging.getLogger(__name__)

CONF_GROUP = "group"

# Extend the shared image_processing platform schema with the mandatory
# person-group slug this platform identifies against.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_GROUP): cv.slugify})


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Microsoft Face identify platform."""
    # API client is created by the microsoft_face integration at its setup.
    api = hass.data[DATA_MICROSOFT_FACE]
    face_group = config[CONF_GROUP]
    confidence = config[CONF_CONFIDENCE]

    entities = []
    # One identify entity per configured camera source.
    for camera in config[CONF_SOURCE]:
        entities.append(
            MicrosoftFaceIdentifyEntity(
                camera[CONF_ENTITY_ID],
                api,
                face_group,
                confidence,
                camera.get(CONF_NAME),
            )
        )

    async_add_entities(entities)


class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
    """Representation of the Microsoft Face API entity for identify."""

    def __init__(self, camera_entity, api, face_group, confidence, name=None):
        """Initialize the Microsoft Face API."""
        super().__init__()

        self._api = api
        self._camera = camera_entity
        self._confidence = confidence
        self._face_group = face_group

        if name:
            self._name = name
        else:
            # Default name is derived from the camera's object id.
            self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}"

    @property
    def confidence(self):
        """Return minimum confidence for send events."""
        return self._confidence

    @property
    def camera_entity(self):
        """Return camera entity id from process pictures."""
        return self._camera

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    async def async_process_image(self, image):
        """Process image.

        This method is a coroutine.
        """
        detect = []
        try:
            # First detect faces in the raw image bytes...
            face_data = await self._api.call_api("post", "detect", image, binary=True)

            if face_data:
                face_ids = [data["faceId"] for data in face_data]
                # ...then identify the detected face ids against the
                # configured person group.
                detect = await self._api.call_api(
                    "post",
                    "identify",
                    {"faceIds": face_ids, "personGroupId": self._face_group},
                )

        except HomeAssistantError as err:
            _LOGGER.error("Can't process image on Microsoft face: %s", err)
            return

        # Parse data
        known_faces = []
        total = 0
        for face in detect:
            total += 1
            if not face["candidates"]:
                continue

            # Only the top candidate returned for each face is reported.
            data = face["candidates"][0]
            name = ""
            # Map the returned personId back to the locally stored person name.
            for s_name, s_id in self._api.store[self._face_group].items():
                if data["personId"] == s_id:
                    name = s_name
                    break

            known_faces.append(
                {ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100}
            )

        self.async_process_faces(known_faces, total)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/microsoft_face_identify/image_processing.py
"""Support for Huawei LTE sensors.""" import logging import re from typing import Optional import attr from homeassistant.components.sensor import ( DEVICE_CLASS_SIGNAL_STRENGTH, DOMAIN as SENSOR_DOMAIN, ) from homeassistant.const import CONF_URL, DATA_BYTES, STATE_UNKNOWN, TIME_SECONDS from . import HuaweiLteBaseEntity from .const import ( DOMAIN, KEY_DEVICE_INFORMATION, KEY_DEVICE_SIGNAL, KEY_MONITORING_MONTH_STATISTICS, KEY_MONITORING_STATUS, KEY_MONITORING_TRAFFIC_STATISTICS, KEY_NET_CURRENT_PLMN, KEY_NET_NET_MODE, KEY_SMS_SMS_COUNT, SENSOR_KEYS, ) _LOGGER = logging.getLogger(__name__) SENSOR_META = { KEY_DEVICE_INFORMATION: dict( include=re.compile(r"^WanIP.*Address$", re.IGNORECASE) ), (KEY_DEVICE_INFORMATION, "WanIPAddress"): dict( name="WAN IP address", icon="mdi:ip", enabled_default=True ), (KEY_DEVICE_INFORMATION, "WanIPv6Address"): dict( name="WAN IPv6 address", icon="mdi:ip" ), (KEY_DEVICE_SIGNAL, "band"): dict(name="Band"), (KEY_DEVICE_SIGNAL, "cell_id"): dict(name="Cell ID"), (KEY_DEVICE_SIGNAL, "lac"): dict(name="LAC", icon="mdi:map-marker"), (KEY_DEVICE_SIGNAL, "mode"): dict( name="Mode", formatter=lambda x: ({"0": "2G", "2": "3G", "7": "4G"}.get(x, "Unknown"), None), ), (KEY_DEVICE_SIGNAL, "pci"): dict(name="PCI"), (KEY_DEVICE_SIGNAL, "rsrq"): dict( name="RSRQ", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrq.php icon=lambda x: (x is None or x < -11) and "mdi:signal-cellular-outline" or x < -8 and "mdi:signal-cellular-1" or x < -5 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rsrp"): dict( name="RSRP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrp.php icon=lambda x: (x is None or x < -110) and "mdi:signal-cellular-outline" or x < -95 and "mdi:signal-cellular-1" or x < -80 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rssi"): dict( name="RSSI", 
device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://eyesaas.com/wi-fi-signal-strength/ icon=lambda x: (x is None or x < -80) and "mdi:signal-cellular-outline" or x < -70 and "mdi:signal-cellular-1" or x < -60 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "sinr"): dict( name="SINR", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/sinr.php icon=lambda x: (x is None or x < 0) and "mdi:signal-cellular-outline" or x < 5 and "mdi:signal-cellular-1" or x < 10 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rscp"): dict( name="RSCP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/RSCP icon=lambda x: (x is None or x < -95) and "mdi:signal-cellular-outline" or x < -85 and "mdi:signal-cellular-1" or x < -75 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), (KEY_DEVICE_SIGNAL, "ecio"): dict( name="EC/IO", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/EC/IO icon=lambda x: (x is None or x < -20) and "mdi:signal-cellular-outline" or x < -10 and "mdi:signal-cellular-1" or x < -6 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), KEY_MONITORING_MONTH_STATISTICS: dict( exclude=re.compile(r"^month(duration|lastcleartime)$", re.IGNORECASE) ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthDownload"): dict( name="Current month download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthUpload"): dict( name="Current month upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_MONITORING_STATUS: dict( include=re.compile( r"^(currentwifiuser|(primary|secondary).*dns)$", re.IGNORECASE ) ), (KEY_MONITORING_STATUS, "CurrentWifiUser"): dict( name="WiFi clients connected", icon="mdi:wifi" ), (KEY_MONITORING_STATUS, "PrimaryDns"): dict( name="Primary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryDns"): dict( name="Secondary DNS 
server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "PrimaryIPv6Dns"): dict( name="Primary IPv6 DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryIPv6Dns"): dict( name="Secondary IPv6 DNS server", icon="mdi:ip" ), KEY_MONITORING_TRAFFIC_STATISTICS: dict( exclude=re.compile(r"^showtraffic$", re.IGNORECASE) ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentConnectTime"): dict( name="Current connection duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownload"): dict( name="Current connection download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUpload"): dict( name="Current connection upload", unit=DATA_BYTES, icon="mdi:upload" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalConnectTime"): dict( name="Total connected duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalDownload"): dict( name="Total download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalUpload"): dict( name="Total upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_NET_CURRENT_PLMN: dict(exclude=re.compile(r"^(Rat|ShortName)$", re.IGNORECASE)), (KEY_NET_CURRENT_PLMN, "State"): dict( name="Operator search mode", formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None), ), (KEY_NET_CURRENT_PLMN, "FullName"): dict(name="Operator name",), (KEY_NET_CURRENT_PLMN, "Numeric"): dict(name="Operator code",), KEY_NET_NET_MODE: dict(include=re.compile(r"^NetworkMode$", re.IGNORECASE)), (KEY_NET_NET_MODE, "NetworkMode"): dict( name="Preferred mode", formatter=lambda x: ( { "00": "4G/3G/2G", "01": "2G", "02": "3G", "03": "4G", "0301": "4G/2G", "0302": "4G/3G", "0201": "3G/2G", }.get(x, "Unknown"), None, ), ), (KEY_SMS_SMS_COUNT, "LocalUnread"): dict( name="SMS unread", icon="mdi:email-receive", ), } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up from config entry.""" router = 
hass.data[DOMAIN].routers[config_entry.data[CONF_URL]] sensors = [] for key in SENSOR_KEYS: items = router.data.get(key) if not items: continue key_meta = SENSOR_META.get(key) if key_meta: include = key_meta.get("include") if include: items = filter(include.search, items) exclude = key_meta.get("exclude") if exclude: items = [x for x in items if not exclude.search(x)] for item in items: sensors.append( HuaweiLteSensor(router, key, item, SENSOR_META.get((key, item), {})) ) async_add_entities(sensors, True) def format_default(value): """Format value.""" unit = None if value is not None: # Clean up value and infer unit, e.g. -71dBm, 15 dB match = re.match( r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$", str(value) ) if match: try: value = float(match.group("value")) unit = match.group("unit") except ValueError: pass return value, unit @attr.s class HuaweiLteSensor(HuaweiLteBaseEntity): """Huawei LTE sensor entity.""" key: str = attr.ib() item: str = attr.ib() meta: dict = attr.ib() _state = attr.ib(init=False, default=STATE_UNKNOWN) _unit: str = attr.ib(init=False) async def async_added_to_hass(self): """Subscribe to needed data on add.""" await super().async_added_to_hass() self.router.subscriptions[self.key].add(f"{SENSOR_DOMAIN}/{self.item}") async def async_will_remove_from_hass(self): """Unsubscribe from needed data on remove.""" await super().async_will_remove_from_hass() self.router.subscriptions[self.key].remove(f"{SENSOR_DOMAIN}/{self.item}") @property def _entity_name(self) -> str: return self.meta.get("name", self.item) @property def _device_unique_id(self) -> str: return f"{self.key}.{self.item}" @property def state(self): """Return sensor state.""" return self._state @property def device_class(self) -> Optional[str]: """Return sensor device class.""" return self.meta.get("device_class") @property def unit_of_measurement(self): """Return sensor's unit of measurement.""" return self.meta.get("unit", self._unit) @property def icon(self): """Return icon 
for sensor.""" icon = self.meta.get("icon") if callable(icon): return icon(self.state) return icon @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return bool(self.meta.get("enabled_default")) async def async_update(self): """Update state.""" try: value = self.router.data[self.key][self.item] except KeyError: _LOGGER.debug("%s[%s] not in data", self.key, self.item) self._available = False return self._available = True formatter = self.meta.get("formatter") if not callable(formatter): formatter = format_default self._state, self._unit = formatter(value) async def async_setup_platform(*args, **kwargs): """Old no longer used way to set up Huawei LTE sensors.""" _LOGGER.warning( "Loading and configuring as a platform is no longer supported or " "required, convert to enabling/disabling available entities" )
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/huawei_lte/sensor.py
"""Support for Yamaha MusicCast Receivers.""" import logging import socket import pymusiccast import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_PORT, STATE_IDLE, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = ( SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_SELECT_SOURCE ) KNOWN_HOSTS_KEY = "data_yamaha_musiccast" INTERVAL_SECONDS = "interval_seconds" DEFAULT_PORT = 5005 DEFAULT_INTERVAL = 480 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(INTERVAL_SECONDS, default=DEFAULT_INTERVAL): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Yamaha MusicCast platform.""" known_hosts = hass.data.get(KNOWN_HOSTS_KEY) if known_hosts is None: known_hosts = hass.data[KNOWN_HOSTS_KEY] = [] _LOGGER.debug("known_hosts: %s", known_hosts) host = config.get(CONF_HOST) port = config.get(CONF_PORT) interval = config.get(INTERVAL_SECONDS) # Get IP of host to prevent duplicates try: ipaddr = socket.gethostbyname(host) except (OSError) as error: _LOGGER.error("Could not communicate with %s:%d: %s", host, port, error) return if [item for item in known_hosts if item[0] == ipaddr]: _LOGGER.warning("Host %s:%d already registered", host, port) return if [item for item in known_hosts if 
item[1] == port]: _LOGGER.warning("Port %s:%d already registered", host, port) return reg_host = (ipaddr, port) known_hosts.append(reg_host) try: receiver = pymusiccast.McDevice(ipaddr, udp_port=port, mc_interval=interval) except pymusiccast.exceptions.YMCInitError as err: _LOGGER.error(err) receiver = None if receiver: for zone in receiver.zones: _LOGGER.debug("Receiver: %s / Port: %d / Zone: %s", receiver, port, zone) add_entities([YamahaDevice(receiver, receiver.zones[zone])], True) else: known_hosts.remove(reg_host) class YamahaDevice(MediaPlayerEntity): """Representation of a Yamaha MusicCast device.""" def __init__(self, recv, zone): """Initialize the Yamaha MusicCast device.""" self._recv = recv self._name = recv.name self._source = None self._source_list = [] self._zone = zone self.mute = False self.media_status = None self.media_status_received = None self.power = STATE_UNKNOWN self.status = STATE_UNKNOWN self.volume = 0 self.volume_max = 0 self._recv.set_yamaha_device(self) self._zone.set_yamaha_device(self) @property def name(self): """Return the name of the device.""" return f"{self._name} ({self._zone.zone_id})" @property def state(self): """Return the state of the device.""" if self.power == STATE_ON and self.status != STATE_UNKNOWN: return self.status return self.power @property def should_poll(self): """Push an update after each command.""" return True @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self.mute @property def volume_level(self): """Volume level of the media player (0..1).""" return self.volume @property def supported_features(self): """Flag of features that are supported.""" return SUPPORTED_FEATURES @property def source(self): """Return the current input source.""" return self._source @property def source_list(self): """List of available input sources.""" return self._source_list @source_list.setter def source_list(self, value): """Set source_list attribute.""" self._source_list = value 
@property def media_content_type(self): """Return the media content type.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.media_status.media_duration if self.media_status else None @property def media_image_url(self): """Image url of current playing media.""" return self.media_status.media_image_url if self.media_status else None @property def media_artist(self): """Artist of current playing media, music track only.""" return self.media_status.media_artist if self.media_status else None @property def media_album(self): """Album of current playing media, music track only.""" return self.media_status.media_album if self.media_status else None @property def media_track(self): """Track number of current playing media, music track only.""" return self.media_status.media_track if self.media_status else None @property def media_title(self): """Title of current playing media.""" return self.media_status.media_title if self.media_status else None @property def media_position(self): """Position of current playing media in seconds.""" if self.media_status and self.state in [ STATE_PLAYING, STATE_PAUSED, STATE_IDLE, ]: return self.media_status.media_position @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return self.media_status_received if self.media_status else None def update(self): """Get the latest details from the device.""" _LOGGER.debug("update: %s", self.entity_id) self._recv.update_status() self._zone.update_status() def update_hass(self): """Push updates to Home Assistant.""" if self.entity_id: _LOGGER.debug("update_hass: pushing updates") self.schedule_update_ha_state() return True def turn_on(self): """Turn on specified media player or all.""" _LOGGER.debug("Turn device: on") self._zone.set_power(True) def turn_off(self): """Turn off specified media player or all.""" _LOGGER.debug("Turn device: off") self._zone.set_power(False) def media_play(self): """Send the media player the command for play/pause.""" _LOGGER.debug("Play") self._recv.set_playback("play") def media_pause(self): """Send the media player the command for pause.""" _LOGGER.debug("Pause") self._recv.set_playback("pause") def media_stop(self): """Send the media player the stop command.""" _LOGGER.debug("Stop") self._recv.set_playback("stop") def media_previous_track(self): """Send the media player the command for prev track.""" _LOGGER.debug("Previous") self._recv.set_playback("previous") def media_next_track(self): """Send the media player the command for next track.""" _LOGGER.debug("Next") self._recv.set_playback("next") def mute_volume(self, mute): """Send mute command.""" _LOGGER.debug("Mute volume: %s", mute) self._zone.set_mute(mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" _LOGGER.debug("Volume level: %.2f / %d", volume, volume * self.volume_max) self._zone.set_volume(volume * self.volume_max) def select_source(self, source): """Send the media player the command to select input source.""" _LOGGER.debug("select_source: %s", source) self.status = STATE_UNKNOWN self._zone.set_input(source) def new_media_status(self, status): """Handle updates of the media status.""" _LOGGER.debug("new media_status arrived") self.media_status = status 
self.media_status_received = dt_util.utcnow()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/yamaha_musiccast/media_player.py
"""Support for Ubee router.""" import logging from pyubee import Ubee import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_MODEL = "model" DEFAULT_MODEL = "detect" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_MODEL, default=DEFAULT_MODEL): vol.Any( "EVW32C-0N", "EVW320B", "EVW321B", "EVW3200-Wifi", "EVW3226@UPC", "DVW32CB", "DDW36C", ), } ) def get_scanner(hass, config): """Validate the configuration and return a Ubee scanner.""" info = config[DOMAIN] host = info[CONF_HOST] username = info[CONF_USERNAME] password = info[CONF_PASSWORD] model = info[CONF_MODEL] ubee = Ubee(host, username, password, model) if not ubee.login(): _LOGGER.error("Login failed") return None scanner = UbeeDeviceScanner(ubee) return scanner class UbeeDeviceScanner(DeviceScanner): """This class queries a wireless Ubee router.""" def __init__(self, ubee): """Initialize the Ubee scanner.""" self._ubee = ubee self._mac2name = {} def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" devices = self._get_connected_devices() self._mac2name = devices return list(devices) def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self._mac2name.get(device) def _get_connected_devices(self): """List connected devices with pyubee.""" if not self._ubee.session_active(): self._ubee.login() return self._ubee.get_connected_devices()
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/ubee/device_tracker.py
"""Component to interface with switches that can be controlled remotely.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, ) from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.loader import bind_hass # mypy: allow-untyped-defs, no-check-untyped-defs DOMAIN = "switch" SCAN_INTERVAL = timedelta(seconds=30) ENTITY_ID_FORMAT = DOMAIN + ".{}" ATTR_TODAY_ENERGY_KWH = "today_energy_kwh" ATTR_CURRENT_POWER_W = "current_power_w" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) PROP_TO_ATTR = { "current_power_w": ATTR_CURRENT_POWER_W, "today_energy_kwh": ATTR_TODAY_ENERGY_KWH, } DEVICE_CLASS_OUTLET = "outlet" DEVICE_CLASS_SWITCH = "switch" DEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) _LOGGER = logging.getLogger(__name__) @bind_hass def is_on(hass, entity_id): """Return if the switch is on based on the statemachine. Async friendly. 
""" return hass.states.is_state(entity_id, STATE_ON) async def async_setup(hass, config): """Track states and offer events for switches.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class SwitchEntity(ToggleEntity): """Representation of a switch.""" @property def current_power_w(self): """Return the current power usage in W.""" return None @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" return None @property def is_standby(self): """Return true if device is in standby.""" return None @property def state_attributes(self): """Return the optional state attributes.""" data = {} for prop, attr in PROP_TO_ATTR.items(): value = getattr(self, prop) if value is not None: data[attr] = value return data @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return None class SwitchDevice(SwitchEntity): """Representation of a switch (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "SwitchDevice is deprecated, modify %s to extend SwitchEntity", cls.__name__, )
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/switch/__init__.py
"""Support for Lutron Caseta shades.""" import logging from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, CoverEntity, ) from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Lutron Caseta cover platform. Adds shades from the Caseta bridge associated with the config_entry as cover entities. """ entities = [] bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id] cover_devices = bridge.get_devices_by_domain(DOMAIN) for cover_device in cover_devices: entity = LutronCasetaCover(cover_device, bridge) entities.append(entity) async_add_entities(entities, True) class LutronCasetaCover(LutronCasetaDevice, CoverEntity): """Representation of a Lutron shade.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION @property def is_closed(self): """Return if the cover is closed.""" return self._device["current_state"] < 1 @property def current_cover_position(self): """Return the current position of cover.""" return self._device["current_state"] async def async_close_cover(self, **kwargs): """Close the cover.""" self._smartbridge.set_value(self.device_id, 0) async def async_open_cover(self, **kwargs): """Open the cover.""" self._smartbridge.set_value(self.device_id, 100) async def async_set_cover_position(self, **kwargs): """Move the shade to a specific position.""" if ATTR_POSITION in kwargs: position = kwargs[ATTR_POSITION] self._smartbridge.set_value(self.device_id, position) async def async_update(self): """Call when forcing a refresh of the device.""" self._device = self._smartbridge.get_device_by_id(self.device_id) _LOGGER.debug(self._device)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/lutron_caseta/cover.py
"""Support for (EMEA/EU-based) Honeywell TCC climate systems. Such systems include evohome, Round Thermostat, and others. """ from datetime import datetime as dt, timedelta import logging import re from typing import Any, Dict, Optional, Tuple import aiohttp.client_exceptions import evohomeasync import evohomeasync2 import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, HTTP_SERVICE_UNAVAILABLE, HTTP_TOO_MANY_REQUESTS, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from .const import DOMAIN, EVO_FOLLOW, GWS, STORAGE_KEY, STORAGE_VER, TCS, UTC_OFFSET _LOGGER = logging.getLogger(__name__) ACCESS_TOKEN = "access_token" ACCESS_TOKEN_EXPIRES = "access_token_expires" REFRESH_TOKEN = "refresh_token" USER_DATA = "user_data" CONF_LOCATION_IDX = "location_idx" SCAN_INTERVAL_DEFAULT = timedelta(seconds=300) SCAN_INTERVAL_MINIMUM = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_LOCATION_IDX, default=0): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=SCAN_INTERVAL_DEFAULT ): vol.All(cv.time_period, vol.Range(min=SCAN_INTERVAL_MINIMUM)), } ) }, extra=vol.ALLOW_EXTRA, ) ATTR_SYSTEM_MODE = "mode" ATTR_DURATION_DAYS = "period" ATTR_DURATION_HOURS = "duration" ATTR_ZONE_TEMP = "setpoint" ATTR_DURATION_UNTIL = "duration" SVC_REFRESH_SYSTEM = "refresh_system" 
SVC_SET_SYSTEM_MODE = "set_system_mode" SVC_RESET_SYSTEM = "reset_system" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SVC_RESET_ZONE_OVERRIDE = "clear_zone_override" RESET_ZONE_OVERRIDE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_TEMP): vol.All( vol.Coerce(float), vol.Range(min=4.0, max=35.0) ), vol.Optional(ATTR_DURATION_UNTIL): vol.All( cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1)) ), } ) # system mode schemas are built dynamically, below def _dt_local_to_aware(dt_naive: dt) -> dt: dt_aware = dt_util.now() + (dt_naive - dt.now()) if dt_aware.microsecond >= 500000: dt_aware += timedelta(seconds=1) return dt_aware.replace(microsecond=0) def _dt_aware_to_naive(dt_aware: dt) -> dt: dt_naive = dt.now() + (dt_aware - dt_util.now()) if dt_naive.microsecond >= 500000: dt_naive += timedelta(seconds=1) return dt_naive.replace(microsecond=0) def convert_until(status_dict: dict, until_key: str) -> None: """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat.""" if until_key in status_dict: # only present for certain modes dt_utc_naive = dt_util.parse_datetime(status_dict[until_key]) status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat() def convert_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: """Recursively convert a dict's keys to snake_case.""" def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) return (string[0]).lower() + re.sub( r"[A-Z]", lambda matched: f"_{matched.group(0).lower()}", string[1:] ) return { (convert_key(k) if isinstance(k, str) else k): ( convert_dict(v) if isinstance(v, dict) else v ) for k, v in dictionary.items() } def _handle_exception(err) -> bool: """Return False if the exception can't be ignored.""" try: raise err except evohomeasync2.AuthenticationError: _LOGGER.error( "Failed to authenticate 
with the vendor's server. " "Check your network and the vendor's service status page. " "Also check that your username and password are correct. " "Message is: %s", err, ) return False except aiohttp.ClientConnectionError: # this appears to be a common occurrence with the vendor's servers _LOGGER.warning( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s", err, ) return False except aiohttp.ClientResponseError: if err.status == HTTP_SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page" ) return False if err.status == HTTP_TOO_MANY_REQUESTS: _LOGGER.warning( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s", CONF_SCAN_INTERVAL, ) return False raise # we don't expect/handle any other Exceptions async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES]) ) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VER, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, 
evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. Fix any configuration errors and restart HA", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False if _LOGGER.isEnabledFor(logging.DEBUG): _config = {"locationInfo": {"timeZone": None}, GWS: [{TCS: None}]} _config["locationInfo"]["timeZone"] = loc_config["locationInfo"]["timeZone"] _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker( hass, client_v2, client_v1, store, config[DOMAIN] ) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config) ) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL] ) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. 
""" @verify_domain_control(hass, DOMAIN) async def force_refresh(call) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config["allowedSystemModes"] # Not all systems support "AutoWithReset": register this handler only if required if [m["systemMode"] for m in modes if m["systemMode"] == "AutoWithReset"]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m["systemMode"] != "AutoWithReset"] # Permanent-only modes will use this schema perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m["canBeTemporary"]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = 
[m["systemMode"] for m in modes if m["timingMode"] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Any(*system_mode_schemas), ) # The zone modes are consistent across all systems and use the same schema hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, ) class EvoBroker: """Container for evohome client and data.""" def __init__(self, hass, client, client_v1, store, params) -> None: """Initialize the evohome client and its data structure.""" self.hass = hass self.client = client self.client_v1 = client_v1 self._store = store self.params = params loc_idx = params[CONF_LOCATION_IDX] self.config = client.installation_info[loc_idx][GWS][0][TCS][0] self.tcs = client.locations[loc_idx]._gateways[0]._control_systems[0] self.tcs_utc_offset = timedelta( minutes=client.locations[loc_idx].timeZone[UTC_OFFSET] ) self.temps = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" # evohomeasync2 uses 
naive/local datetimes access_token_expires = _dt_local_to_aware(self.client.access_token_expires) app_storage = {CONF_USERNAME: self.client.username} app_storage[REFRESH_TOKEN] = self.client.refresh_token app_storage[ACCESS_TOKEN] = self.client.access_token app_storage[ACCESS_TOKEN_EXPIRES] = access_token_expires.isoformat() if self.client_v1 and self.client_v1.user_data: app_storage[USER_DATA] = { "userInfo": {"userID": self.client_v1.user_data["userInfo"]["userID"]}, "sessionId": self.client_v1.user_data["sessionId"], } else: app_storage[USER_DATA] = None await self._store.async_save(app_storage) async def call_client_api(self, api_function, refresh=True) -> Any: """Call a client API.""" try: result = await api_function except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: if not _handle_exception(err): return if refresh: self.hass.helpers.event.async_call_later(1, self.async_update()) return result async def _update_v1(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" def get_session_id(client_v1) -> Optional[str]: user_data = client_v1.user_data if client_v1 else None return user_data.get("sessionId") if user_data else None session_id = get_session_id(self.client_v1) try: temps = list(await self.client_v1.temperatures(force_refresh=True)) except aiohttp.ClientError as err: _LOGGER.warning( "Unable to obtain the latest high-precision temperatures. " "Check your network and the vendor's service status page. " "Proceeding with low-precision temperatures. 
" "Message is: %s", err, ) self.temps = None # these are now stale, will fall back to v2 temps else: if ( str(self.client_v1.location_id) != self.client.locations[self.params[CONF_LOCATION_IDX]].locationId ): _LOGGER.warning( "The v2 API's configured location doesn't match " "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled" ) self.client_v1 = self.temps = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} _LOGGER.debug("Temperatures = %s", self.temps) if session_id != get_session_id(self.client_v1): await self.save_auth_tokens() async def _update_v2(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" access_token = self.client.access_token loc_idx = self.params[CONF_LOCATION_IDX] try: status = await self.client.locations[loc_idx].status() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) else: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status) if access_token != self.client.access_token: await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. This includes state data for a Controller and all its child devices, such as the operating mode of the Controller and the current temp of its children (e.g. Zones, DHW controller). """ await self._update_v2() if self.client_v1: await self._update_v1() # inform the evohome devices that state data has been updated async_dispatcher_send(self.hass, DOMAIN) class EvoDevice(Entity): """Base for any evohome device. This includes the Controller, (up to 12) Heating Zones and (optionally) a DHW controller. 
""" def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device self._evo_broker = evo_broker self._evo_tcs = evo_broker.tcs self._unique_id = self._name = self._icon = self._precision = None self._supported_features = None self._device_state_attrs = {} async def async_refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] in [SVC_SET_ZONE_OVERRIDE, SVC_RESET_ZONE_OVERRIDE]: await self.async_zone_svc_request(payload["service"], payload["data"]) return await self.async_tcs_svc_request(payload["service"], payload["data"]) async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller.""" raise NotImplementedError async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" raise NotImplementedError @property def should_poll(self) -> bool: """Evohome entities should not be polled.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the evohome entity.""" return self._name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the evohome-specific state attributes.""" status = self._device_state_attrs if "systemModeStatus" in status: convert_until(status["systemModeStatus"], "timeUntil") if "setpointStatus" in status: convert_until(status["setpointStatus"], "until") if "stateStatus" in status: convert_until(status["stateStatus"], "until") return {"status": convert_dict(status)} @property def icon(self) -> str: """Return the icon to use in the frontend UI.""" return self._icon @property def supported_features(self) -> int: """Get the flag of supported 
features of the device.""" return self._supported_features async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" async_dispatcher_connect(self.hass, DOMAIN, self.async_refresh) @property def precision(self) -> float: """Return the temperature precision to use in the frontend UI.""" return self._precision @property def temperature_unit(self) -> str: """Return the temperature unit to use in the frontend UI.""" return TEMP_CELSIUS class EvoChild(EvoDevice): """Base for any evohome child. This includes (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a evohome Controller (hub).""" super().__init__(evo_broker, evo_device) self._schedule = {} self._setpoints = {} @property def current_temperature(self) -> Optional[float]: """Return the current temperature of a Zone.""" if self._evo_broker.temps: if self._evo_broker.temps[self._evo_device.zoneId] != 128: return self._evo_broker.temps[self._evo_device.zoneId] if self._evo_device.temperatureStatus["isAvailable"]: return self._evo_device.temperatureStatus["temperature"] @property def setpoints(self) -> Dict[str, Any]: """Return the current/next setpoints from the schedule. Only Zones & DHW controllers (but not the TCS) can have schedules. """ def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt: dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset return dt_util.as_local(dt_aware) if not self._schedule["DailySchedules"]: return {} # no schedule {'DailySchedules': []}, so no scheduled setpoints day_time = dt_util.now() day_of_week = int(day_time.strftime("%w")) # 0 is Sunday time_of_day = day_time.strftime("%H:%M:%S") try: # Iterate today's switchpoints until past the current time of day... 
day = self._schedule["DailySchedules"][day_of_week] sp_idx = -1 # last switchpoint of the day before for i, tmp in enumerate(day["Switchpoints"]): if time_of_day > tmp["TimeOfDay"]: sp_idx = i # current setpoint else: break # Did the current SP start yesterday? Does the next start SP tomorrow? this_sp_day = -1 if sp_idx == -1 else 0 next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0 for key, offset, idx in [ ("this", this_sp_day, sp_idx), ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)), ]: sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d") day = self._schedule["DailySchedules"][(day_of_week + offset) % 7] switchpoint = day["Switchpoints"][idx] dt_aware = _dt_evo_to_aware( dt_util.parse_datetime(f"{sp_date}T{switchpoint['TimeOfDay']}"), self._evo_broker.tcs_utc_offset, ) self._setpoints[f"{key}_sp_from"] = dt_aware.isoformat() try: self._setpoints[f"{key}_sp_temp"] = switchpoint["heatSetpoint"] except KeyError: self._setpoints[f"{key}_sp_state"] = switchpoint["DhwState"] except IndexError: self._setpoints = {} _LOGGER.warning( "Failed to get setpoints, report as an issue if this error persists", exc_info=True, ) return self._setpoints async def _update_schedule(self) -> None: """Get the latest schedule, if any.""" if "DailySchedules" in self._schedule and not self._schedule["DailySchedules"]: if not self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: return # avoid unnecessary I/O - there's nothing to update self._schedule = await self._evo_broker.call_client_api( self._evo_device.schedule(), refresh=False ) _LOGGER.debug("Schedule['%s'] = %s", self.name, self._schedule) async def async_update(self) -> None: """Get the latest state data.""" next_sp_from = self._setpoints.get("next_sp_from", "2000-01-01T00:00:00+00:00") if dt_util.now() >= dt_util.parse_datetime(next_sp_from): await self._update_schedule() # no schedule, or it's out-of-date self._device_state_attrs = {"setpoints": self.setpoints}
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/evohome/__init__.py
"""Support for particulate matter sensors connected to a serial port.""" import logging from pmsensor import serial_pm as pm import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_BRAND = "brand" CONF_SERIAL_DEVICE = "serial_device" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRAND): cv.string, vol.Required(CONF_SERIAL_DEVICE): cv.string, vol.Optional(CONF_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available PM sensors.""" try: coll = pm.PMDataCollector( config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)] ) except KeyError: _LOGGER.error( "Brand %s not supported\n supported brands: %s", config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys(), ) return except OSError as err: _LOGGER.error( "Could not open serial connection to %s (%s)", config.get(CONF_SERIAL_DEVICE), err, ) return dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = "{} PM{}".format(config.get(CONF_NAME), pmname) else: name = f"PM{pmname}" dev.append(ParticulateMatterSensor(coll, name, pmname)) add_entities(dev) class ParticulateMatterSensor(Entity): """Representation of an Particulate matter sensor.""" def __init__(self, pmDataCollector, name, pmname): """Initialize a new PM sensor.""" self._name = name self._pmname = pmname self._state = None self._collector = pmDataCollector @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER def update(self): 
"""Read from sensor and update the state.""" _LOGGER.debug("Reading data from PM sensor") try: self._state = self._collector.read_data()[self._pmname] except KeyError: _LOGGER.error("Could not read PM%s value", self._pmname)
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/serial_pm/sensor.py
"""Support for Luftdaten sensors.""" import logging from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from . import ( DATA_LUFTDATEN, DATA_LUFTDATEN_CLIENT, DEFAULT_ATTRIBUTION, DOMAIN, SENSORS, TOPIC_UPDATE, ) from .const import ATTR_SENSOR_ID _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up a Luftdaten sensor based on a config entry.""" luftdaten = hass.data[DOMAIN][DATA_LUFTDATEN_CLIENT][entry.entry_id] sensors = [] for sensor_type in luftdaten.sensor_conditions: try: name, icon, unit = SENSORS[sensor_type] except KeyError: _LOGGER.debug("Unknown sensor value type: %s", sensor_type) continue sensors.append( LuftdatenSensor( luftdaten, sensor_type, name, icon, unit, entry.data[CONF_SHOW_ON_MAP] ) ) async_add_entities(sensors, True) class LuftdatenSensor(Entity): """Implementation of a Luftdaten sensor.""" def __init__(self, luftdaten, sensor_type, name, icon, unit, show): """Initialize the Luftdaten sensor.""" self._async_unsub_dispatcher_connect = None self.luftdaten = luftdaten self._icon = icon self._name = name self._data = None self.sensor_type = sensor_type self._unit_of_measurement = unit self._show_on_map = show self._attrs = {} @property def icon(self): """Return the icon.""" return self._icon @property def state(self): """Return the state of the device.""" if self._data is not None: return self._data[self.sensor_type] @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def should_poll(self): """Disable polling.""" return False @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" if self._data is not None: return 
f"{self._data['sensor_id']}_{self.sensor_type}" @property def device_state_attributes(self): """Return the state attributes.""" self._attrs[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION if self._data is not None: self._attrs[ATTR_SENSOR_ID] = self._data["sensor_id"] on_map = ATTR_LATITUDE, ATTR_LONGITUDE no_map = "lat", "long" lat_format, lon_format = on_map if self._show_on_map else no_map try: self._attrs[lon_format] = self._data["longitude"] self._attrs[lat_format] = self._data["latitude"] return self._attrs except KeyError: return async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update ) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Get the latest data and update the state.""" try: self._data = self.luftdaten.data[DATA_LUFTDATEN] except KeyError: return
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/luftdaten/sensor.py
"""Support for LIRC devices.""" # pylint: disable=no-member, import-error import logging import threading import time import lirc import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP _LOGGER = logging.getLogger(__name__) BUTTON_NAME = "button_name" DOMAIN = "lirc" EVENT_IR_COMMAND_RECEIVED = "ir_command_received" ICON = "mdi:remote" CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the LIRC capability.""" # blocking=True gives unexpected behavior (multiple responses for 1 press) # also by not blocking, we allow hass to shut down the thread gracefully # on exit. lirc.init("home-assistant", blocking=False) lirc_interface = LircInterface(hass) def _start_lirc(_event): lirc_interface.start() def _stop_lirc(_event): lirc_interface.stopped.set() hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_lirc) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_lirc) return True class LircInterface(threading.Thread): """ This interfaces with the lirc daemon to read IR commands. When using lirc in blocking mode, sometimes repeated commands get produced in the next read of a command so we use a thread here to just wait around until a non-empty response is obtained from lirc. 
""" def __init__(self, hass): """Construct a LIRC interface object.""" threading.Thread.__init__(self) self.daemon = True self.stopped = threading.Event() self.hass = hass def run(self): """Run the loop of the LIRC interface thread.""" _LOGGER.debug("LIRC interface thread started") while not self.stopped.isSet(): try: code = lirc.nextcode() # list; empty if no buttons pressed except lirc.NextCodeError: _LOGGER.warning("Error reading next code from LIRC") code = None # interpret result from python-lirc if code: code = code[0] _LOGGER.info("Got new LIRC code %s", code) self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code}) else: time.sleep(0.2) lirc.deinit() _LOGGER.debug("LIRC interface thread stopped")
"""Test service helpers.""" from collections import OrderedDict from copy import deepcopy import unittest import pytest import voluptuous as vol # To prevent circular import when running just this file from homeassistant import core as ha, exceptions from homeassistant.auth.permissions import PolicyPermissions import homeassistant.components # noqa: F401, pylint: disable=unused-import from homeassistant.const import ( ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, STATE_ON, ) from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, service, template, ) import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_setup_component from tests.async_mock import AsyncMock, Mock, patch from tests.common import ( MockEntity, get_test_home_assistant, mock_device_registry, mock_registry, mock_service, ) SUPPORT_A = 1 SUPPORT_B = 2 SUPPORT_C = 4 @pytest.fixture def mock_handle_entity_call(): """Mock service platform call.""" with patch( "homeassistant.helpers.service._handle_entity_call", return_value=None, ) as mock_call: yield mock_call @pytest.fixture def mock_entities(hass): """Return mock entities in an ordered dict.""" kitchen = MockEntity( entity_id="light.kitchen", available=True, should_poll=False, supported_features=SUPPORT_A, ) living_room = MockEntity( entity_id="light.living_room", available=True, should_poll=False, supported_features=SUPPORT_B, ) bedroom = MockEntity( entity_id="light.bedroom", available=True, should_poll=False, supported_features=(SUPPORT_A | SUPPORT_B), ) bathroom = MockEntity( entity_id="light.bathroom", available=True, should_poll=False, supported_features=(SUPPORT_B | SUPPORT_C), ) entities = OrderedDict() entities[kitchen.entity_id] = kitchen entities[living_room.entity_id] = living_room entities[bedroom.entity_id] = bedroom entities[bathroom.entity_id] = bathroom return entities @pytest.fixture def area_mock(hass): """Mock including area info.""" 
hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) device_in_area = dev_reg.DeviceEntry(area_id="test-area") device_no_area = dev_reg.DeviceEntry() device_diff_area = dev_reg.DeviceEntry(area_id="diff-area") mock_device_registry( hass, { device_in_area.id: device_in_area, device_no_area.id: device_no_area, device_diff_area.id: device_diff_area, }, ) entity_in_area = ent_reg.RegistryEntry( entity_id="light.in_area", unique_id="in-area-id", platform="test", device_id=device_in_area.id, ) entity_no_area = ent_reg.RegistryEntry( entity_id="light.no_area", unique_id="no-area-id", platform="test", device_id=device_no_area.id, ) entity_diff_area = ent_reg.RegistryEntry( entity_id="light.diff_area", unique_id="diff-area-id", platform="test", device_id=device_diff_area.id, ) mock_registry( hass, { entity_in_area.entity_id: entity_in_area, entity_no_area.entity_id: entity_no_area, entity_diff_area.entity_id: entity_diff_area, }, ) class TestServiceHelpers(unittest.TestCase): """Test the Home Assistant service helpers.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.calls = mock_service(self.hass, "test_domain", "test_service") def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_template_service_call(self): """Test service call with templating.""" config = { "service_template": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data_template": { "hello": "{{ 'goodbye' }}", "data": {"value": "{{ 'complex' }}", "simple": "simple"}, "list": ["{{ 'list' }}", "2"], }, } service.call_from_config(self.hass, config) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" assert self.calls[0].data["data"]["value"] == "complex" assert self.calls[0].data["data"]["simple"] == "simple" assert 
self.calls[0].data["list"][0] == "list" def test_passing_variables_to_templates(self): """Test passing variables to templates.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ var_data }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert self.calls[0].data["hello"] == "goodbye" def test_bad_template(self): """Test passing bad template.""" config = { "service_template": "{{ var_service }}", "entity_id": "hello.world", "data_template": {"hello": "{{ states + unknown_var }}"}, } service.call_from_config( self.hass, config, variables={ "var_service": "test_domain.test_service", "var_data": "goodbye", }, ) self.hass.block_till_done() assert len(self.calls) == 0 def test_split_entity_string(self): """Test splitting of entity string.""" service.call_from_config( self.hass, { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) self.hass.block_till_done() assert ["hello.world", "sensor.beer"] == self.calls[-1].data.get("entity_id") def test_not_mutate_input(self): """Test for immutable input.""" config = cv.SERVICE_SCHEMA( { "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, } ) orig = deepcopy(config) # Only change after call is each template getting hass attached template.attach(self.hass, orig) service.call_from_config(self.hass, config, validate_config=False) assert orig == config @patch("homeassistant.helpers.service._LOGGER.error") def test_fail_silently_if_no_service(self, mock_log): """Test failing if service is missing.""" service.call_from_config(self.hass, None) assert mock_log.call_count == 1 service.call_from_config(self.hass, {}) assert mock_log.call_count == 2 service.call_from_config(self.hass, {"service": "invalid"}) assert 
mock_log.call_count == 3 async def test_extract_entity_ids(hass): """Test extract_entity_ids method.""" hass.states.async_set("light.Bowl", STATE_ON) hass.states.async_set("light.Ceiling", STATE_OFF) hass.states.async_set("light.Kitchen", STATE_OFF) await hass.components.group.Group.async_create_group( hass, "test", ["light.Ceiling", "light.Kitchen"] ) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call ) assert {"group.test"} == await service.async_extract_entity_ids( hass, call, expand_group=False ) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) async def test_extract_entity_ids_from_area(hass, area_mock): """Test extract_entity_ids method with areas.""" call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) assert {"light.in_area"} == await service.async_extract_entity_ids(hass, call) call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) assert { "light.in_area", "light.diff_area", } == await service.async_extract_entity_ids(hass, call) assert ( await service.async_extract_entity_ids( hass, ha.ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) async def test_async_get_all_descriptions(hass): """Test async_get_all_descriptions.""" group = hass.components.group group_config = {group.DOMAIN: {}} await async_setup_component(hass, group.DOMAIN, group_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 1 assert "description" in descriptions["group"]["reload"] assert "fields" in descriptions["group"]["reload"] logger = hass.components.logger logger_config = {logger.DOMAIN: {}} await 
async_setup_component(hass, logger.DOMAIN, logger_config) descriptions = await service.async_get_all_descriptions(hass) assert len(descriptions) == 2 assert "description" in descriptions[logger.DOMAIN]["set_level"] assert "fields" in descriptions[logger.DOMAIN]["set_level"] async def test_call_with_required_features(hass, mock_entities): """Test service calls invoked only if entity has required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) assert test_service_mock.call_count == 2 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], ] actual = [call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_both_required_features(hass, mock_entities): """Test service calls invoked only if entity has both features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) assert test_service_mock.call_count == 1 assert [call[0][0] for call in test_service_mock.call_args_list] == [ mock_entities["light.bedroom"] ] async def test_call_with_one_of_required_features(hass, mock_entities): """Test service calls invoked with one entity having the required features.""" test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) assert test_service_mock.call_count == 3 expected = [ mock_entities["light.kitchen"], mock_entities["light.bedroom"], mock_entities["light.bathroom"], ] actual = 
[call[0][0] for call in test_service_mock.call_args_list] assert all(entity in actual for entity in expected) async def test_call_with_sync_func(hass, mock_entities): """Test invoking sync service calls.""" test_service_mock = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], test_service_mock, ha.ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), ) assert test_service_mock.call_count == 1 async def test_call_with_sync_attr(hass, mock_entities): """Test invoking sync service calls.""" mock_method = mock_entities["light.kitchen"].sync_method = Mock(return_value=None) await service.entity_service_call( hass, [Mock(entities=mock_entities)], "sync_method", ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, ), ) assert mock_method.call_count == 1 # We pass empty kwargs because both entity_id and area_id are filtered out assert mock_method.mock_calls[0][2] == {} async def test_call_context_user_not_exist(hass): """Check we don't allow deleted users to do things.""" with pytest.raises(exceptions.UnknownUser) as err: await service.entity_service_call( hass, [], Mock(), ha.ServiceCall( "test_domain", "test_service", context=ha.Context(user_id="non-existing"), ), ) assert err.value.context.user_id == "non-existing" async def test_call_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we only target allowed entities if targeting all.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == 
"light.kitchen" async def test_call_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities.""" with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock( permissions=PolicyPermissions( {"entities": {"entity_ids": {"light.kitchen": True}}}, None ) ), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_context_target_specific_no_auth( hass, mock_handle_entity_call, mock_entities ): """Check targeting specific entities without auth.""" with pytest.raises(exceptions.Unauthorized) as err: with patch( "homeassistant.auth.AuthManager.async_get_user", return_value=Mock(permissions=PolicyPermissions({}, None)), ): await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", {"entity_id": "light.kitchen"}, context=ha.Context(user_id="mock-id"), ), ) assert err.value.context.user_id == "mock-id" assert err.value.entity_id == "light.kitchen" async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_entities): """Check we target all if no user context given.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_no_context_target_specific( hass, mock_handle_entity_call, mock_entities ): """Check we can target specified entities.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall( 
"test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, ), ) assert len(mock_handle_entity_call.mock_calls) == 1 assert mock_handle_entity_call.mock_calls[0][1][1].entity_id == "light.kitchen" async def test_call_with_match_all( hass, mock_handle_entity_call, mock_entities, caplog ): """Check we only target allowed entities if targeting all.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list( mock_entities.values() ) async def test_call_with_omit_entity_id(hass, mock_handle_entity_call, mock_entities): """Check service call if we do not pass an entity ID.""" await service.entity_service_call( hass, [Mock(entities=mock_entities)], Mock(), ha.ServiceCall("test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 async def test_register_admin_service(hass, hass_read_only_user, hass_admin_user): """Test the register admin service.""" calls = [] async def mock_service(call): calls.append(call) hass.helpers.service.async_register_admin_service("test", "test", mock_service) hass.helpers.service.async_register_admin_service( "test", "test2", mock_service, vol.Schema({vol.Required("required"): cv.boolean}), ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id="non-existing"), ) assert len(calls) == 0 with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test", "test", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): await hass.services.async_call( "test", "test", {"invalid": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 with pytest.raises(vol.Invalid): 
await hass.services.async_call( "test", "test2", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 0 await hass.services.async_call( "test", "test2", {"required": True}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 assert calls[0].context.user_id == hass_admin_user.id async def test_domain_control_not_async(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] def mock_service_log(call): """Define a protected service.""" calls.append(call) with pytest.raises(exceptions.HomeAssistantError): hass.helpers.service.verify_domain_control("test_domain")(mock_service_log) async def test_domain_control_unknown(hass, mock_entities): """Test domain verification in a service call with an unknown user.""" calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) with patch( "homeassistant.helpers.entity_registry.async_get_registry", return_value=Mock(entities=mock_entities), ): protected_mock_service = hass.helpers.service.verify_domain_control( "test_domain" )(mock_service_log) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) with pytest.raises(exceptions.UnknownUser): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id="fake_user_id"), ) assert len(calls) == 0 async def test_domain_control_unauthorized(hass, hass_read_only_user): """Test domain verification in a service call with an unauthorized user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", 
protected_mock_service, schema=None ) with pytest.raises(exceptions.Unauthorized): await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_read_only_user.id), ) assert len(calls) == 0 async def test_domain_control_admin(hass, hass_admin_user): """Test domain verification in a service call with an admin user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=hass_admin_user.id), ) assert len(calls) == 1 async def test_domain_control_no_user(hass): """Test domain verification in a service call with no user.""" mock_registry( hass, { "light.kitchen": ent_reg.RegistryEntry( entity_id="light.kitchen", unique_id="kitchen", platform="test_domain", ) }, ) calls = [] async def mock_service_log(call): """Define a protected service.""" calls.append(call) protected_mock_service = hass.helpers.service.verify_domain_control("test_domain")( mock_service_log ) hass.services.async_register( "test_domain", "test_service", protected_mock_service, schema=None ) await hass.services.async_call( "test_domain", "test_service", {}, blocking=True, context=ha.Context(user_id=None), ) assert len(calls) == 1 async def test_extract_from_service_available_device(hass): """Test the extraction of entity from service and device is available.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2", available=False), MockEntity(name="test_3", entity_id="test_domain.test_3"), 
MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] call_1 = ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) ] call_2 = ha.ServiceCall( "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) ] assert ( await service.async_extract_entities( hass, entities, ha.ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_NONE},), ) == [] ) async def test_extract_from_service_empty_if_no_entity_id(hass): """Test the extraction from service without specifying entity.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall("test", "service") assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_filter_out_non_existing_entities(hass): """Test the extraction of non existing entities from service.""" entities = [ MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] call = ha.ServiceCall( "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) ] async def test_extract_from_service_area_id(hass, area_mock): """Test the extraction using area ID as reference.""" entities = [ MockEntity(name="in_area", entity_id="light.in_area"), MockEntity(name="no_area", entity_id="light.no_area"), MockEntity(name="diff_area", entity_id="light.diff_area"), ] call = ha.ServiceCall("light", "turn_on", {"area_id": "test-area"}) extracted = await 
service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" call = ha.ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ "light.diff_area", "light.in_area", ]
pschmitt/home-assistant
tests/helpers/test_service.py
homeassistant/components/lirc/__init__.py
# GPIO Zero: a library for controlling the Raspberry Pi's GPIO pins # Copyright (c) 2015-2019 Dave Jones <dave@waveform.org.uk> # Copyright (c) 2015-2019 Ben Nuttall <ben@bennuttall.com> # Copyright (c) 2016 Andrew Scheller <github@loowis.durge.org> # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
from __future__ import ( unicode_literals, print_function, absolute_import, division, ) nstr = str str = type('') import os import atexit import weakref import warnings from collections import namedtuple, OrderedDict from itertools import chain from types import FunctionType from threading import Lock from .pins import Pin from .threads import _threads_shutdown from .mixins import ( ValuesMixin, SharedMixin, ) from .exc import ( BadPinFactory, DeviceClosed, CompositeDeviceBadName, CompositeDeviceBadOrder, CompositeDeviceBadDevice, GPIOPinMissing, GPIOPinInUse, GPIODeviceClosed, PinFactoryFallback, ) from .compat import frozendict class GPIOMeta(type): # NOTE Yes, this is a metaclass. Don't be scared - it's a simple one. def __new__(mcls, name, bases, cls_dict): # Construct the class as normal cls = super(GPIOMeta, mcls).__new__(mcls, name, bases, cls_dict) # If there's a method in the class which has no docstring, search # the base classes recursively for a docstring to copy for attr_name, attr in cls_dict.items(): if isinstance(attr, FunctionType) and not attr.__doc__: for base_cls in cls.__mro__: if hasattr(base_cls, attr_name): base_fn = getattr(base_cls, attr_name) if base_fn.__doc__: attr.__doc__ = base_fn.__doc__ break return cls def __call__(cls, *args, **kwargs): # Make sure cls has GPIOBase somewhere in its ancestry (otherwise # setting __attrs__ below will be rather pointless) assert issubclass(cls, GPIOBase) if issubclass(cls, SharedMixin): # If SharedMixin appears in the class' ancestry, convert the # constructor arguments to a key and check whether an instance # already exists. Only construct the instance if the key's new. 
key = cls._shared_key(*args, **kwargs) try: self = cls._instances[key] self._refs += 1 except (KeyError, ReferenceError) as e: self = super(GPIOMeta, cls).__call__(*args, **kwargs) self._refs = 1 # Replace the close method with one that merely decrements # the refs counter and calls the original close method when # it reaches zero old_close = self.close def close(): self._refs = max(0, self._refs - 1) if not self._refs: try: old_close() finally: try: del cls._instances[key] except KeyError: # If the _refs go negative (too many closes) # just ignore the resulting KeyError here - # it's already gone pass self.close = close cls._instances[key] = weakref.proxy(self) else: # Construct the instance as normal self = super(GPIOMeta, cls).__call__(*args, **kwargs) # At this point __new__ and __init__ have all been run. We now fix the # set of attributes on the class by dir'ing the instance and creating a # frozenset of the result called __attrs__ (which is queried by # GPIOBase.__setattr__). An exception is made for SharedMixin devices # which can be constructed multiple times, returning the same instance if not issubclass(cls, SharedMixin) or self._refs == 1: self.__attrs__ = frozenset(dir(self)) return self # Cross-version compatible method of using a metaclass class GPIOBase(GPIOMeta(nstr('GPIOBase'), (), {})): def __setattr__(self, name, value): # This overridden __setattr__ simply ensures that additional attributes # cannot be set on the class after construction (it manages this in # conjunction with the meta-class above). 
Traditionally, this is # managed with __slots__; however, this doesn't work with Python's # multiple inheritance system which we need to use in order to avoid # repeating the "source" and "values" property code in myriad places if hasattr(self, '__attrs__') and name not in self.__attrs__: raise AttributeError( "'%s' object has no attribute '%s'" % ( self.__class__.__name__, name)) return super(GPIOBase, self).__setattr__(name, value) def __del__(self): self.close() def close(self): """ Shut down the device and release all associated resources. This method can be called on an already closed device without raising an exception. This method is primarily intended for interactive use at the command line. It disables the device and releases its pin(s) for use by another device. You can attempt to do this simply by deleting an object, but unless you've cleaned up all references to the object this may not work (even if you've cleaned up all references, there's still no guarantee the garbage collector will actually delete the object at that point). By contrast, the close method provides a means of ensuring that the object is shut down. For example, if you have a breadboard with a buzzer connected to pin 16, but then wish to attach an LED instead: >>> from gpiozero import * >>> bz = Buzzer(16) >>> bz.on() >>> bz.off() >>> bz.close() >>> led = LED(16) >>> led.blink() :class:`Device` descendents can also be used as context managers using the :keyword:`with` statement. For example: >>> from gpiozero import * >>> with Buzzer(16) as bz: ... bz.on() ... >>> with LED(16) as led: ... led.on() ... """ # This is a placeholder which is simply here to ensure close() can be # safely called from subclasses without worrying whether super-classes # have it (which in turn is useful in conjunction with the SourceMixin # class). pass @property def closed(self): """ Returns :data:`True` if the device is closed (see the :meth:`close` method). 
Once a device is closed you can no longer use any other methods or properties to control or query the device. """ raise NotImplementedError def _check_open(self): if self.closed: raise DeviceClosed( '%s is closed or uninitialized' % self.__class__.__name__) def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): self.close() class Device(ValuesMixin, GPIOBase): """ Represents a single device of any type; GPIO-based, SPI-based, I2C-based, etc. This is the base class of the device hierarchy. It defines the basic services applicable to all devices (specifically the :attr:`is_active` property, the :attr:`value` property, and the :meth:`close` method). .. attribute:: pin_factory This attribute exists at both a class level (representing the default pin factory used to construct devices when no *pin_factory* parameter is specified), and at an instance level (representing the pin factory that the device was constructed with). The pin factory provides various facilities to the device including allocating pins, providing low level interfaces (e.g. SPI), and clock facilities (querying and calculating elapsed times). """ pin_factory = None # instance of a Factory sub-class def __init__(self, **kwargs): # Force pin_factory to be keyword-only, even in Python 2 pin_factory = kwargs.pop('pin_factory', None) if pin_factory is None: if Device.pin_factory is None: Device.pin_factory = Device._default_pin_factory() self.pin_factory = Device.pin_factory else: self.pin_factory = pin_factory if kwargs: raise TypeError("Device.__init__() got unexpected keyword " "argument '%s'" % kwargs.popitem()[0]) super(Device, self).__init__() @staticmethod def _default_pin_factory(): # We prefer RPi.GPIO here as it supports PWM, and all Pi revisions. 
If # no third-party libraries are available, however, we fall back to a # pure Python implementation which supports platforms like PyPy # # NOTE: If the built-in pin factories are expanded, the dict must be # updated along with the entry-points in setup.py. default_factories = OrderedDict(( ('rpigpio', 'gpiozero.pins.rpigpio:RPiGPIOFactory'), ('rpio', 'gpiozero.pins.rpio:RPIOFactory'), ('pigpio', 'gpiozero.pins.pigpio:PiGPIOFactory'), ('native', 'gpiozero.pins.native:NativeFactory'), )) name = os.environ.get('GPIOZERO_PIN_FACTORY') if name is None: # If no factory is explicitly specified, try various names in # "preferred" order. For speed, we select from the dictionary above # rather than importing pkg_resources and using load_entry_point for name, entry_point in default_factories.items(): try: mod_name, cls_name = entry_point.split(':', 1) module = __import__(mod_name, fromlist=(cls_name,)) return getattr(module, cls_name)() except Exception as e: warnings.warn( PinFactoryFallback( 'Falling back from %s: %s' % (name, str(e)))) raise BadPinFactory('Unable to load any default pin factory!') elif name in default_factories: # As above, this is a fast-path optimization to avoid loading # pkg_resources (which it turns out was 80% of gpiozero's import # time!) mod_name, cls_name = default_factories[name].split(':', 1) module = __import__(mod_name, fromlist=(cls_name,)) return getattr(module, cls_name)() else: # Slow path: load pkg_resources and try and find the specified # entry-point. Try with the name verbatim first. 
If that fails, # attempt with the lower-cased name (this ensures compatibility # names work but we're still case insensitive for all factories) import pkg_resources group = 'gpiozero_pin_factories' for factory in pkg_resources.iter_entry_points(group, name): return factory.load()() for factory in pkg_resources.iter_entry_points(group, name.lower()): return factory.load()() raise BadPinFactory('Unable to find pin factory "%s"' % name) def __repr__(self): return "<gpiozero.%s object>" % (self.__class__.__name__) def _conflicts_with(self, other): """ Called by :meth:`Factory.reserve_pins` to test whether the *other* :class:`Device` using a common pin conflicts with this device's intent to use it. The default is :data:`True` indicating that all devices conflict with common pins. Sub-classes may override this to permit more nuanced replies. """ return True @property def value(self): """ Returns a value representing the device's state. Frequently, this is a boolean value, or a number between 0 and 1 but some devices use larger ranges (e.g. -1 to +1) and composite devices usually use tuples to return the states of all their subordinate components. """ raise NotImplementedError @property def is_active(self): """ Returns :data:`True` if the device is currently active and :data:`False` otherwise. This property is usually derived from :attr:`value`. Unlike :attr:`value`, this is *always* a boolean. """ return bool(self.value) class CompositeDevice(Device): """ Extends :class:`Device`. Represents a device composed of multiple devices like simple HATs, H-bridge motor controllers, robots composed of multiple motors, etc. The constructor accepts subordinate devices as positional or keyword arguments. Positional arguments form unnamed devices accessed by treating the composite device as a container, while keyword arguments are added to the device as named (read-only) attributes. For example: .. 
code-block:: pycon >>> from gpiozero import * >>> d = CompositeDevice(LED(2), LED(3), LED(4), btn=Button(17)) >>> d[0] <gpiozero.LED object on pin GPIO2, active_high=True, is_active=False> >>> d[1] <gpiozero.LED object on pin GPIO3, active_high=True, is_active=False> >>> d[2] <gpiozero.LED object on pin GPIO4, active_high=True, is_active=False> >>> d.btn <gpiozero.Button object on pin GPIO17, pull_up=True, is_active=False> >>> d.value CompositeDeviceValue(device_0=False, device_1=False, device_2=False, btn=False) :param Device \\*args: The un-named devices that belong to the composite device. The :attr:`value` attributes of these devices will be represented within the composite device's tuple :attr:`value` in the order specified here. :type _order: list or None :param _order: If specified, this is the order of named items specified by keyword arguments (to ensure that the :attr:`value` tuple is constructed with a specific order). All keyword arguments *must* be included in the collection. If omitted, an alphabetically sorted order will be selected for keyword arguments. :type pin_factory: Factory or None :param pin_factory: See :doc:`api_pins` for more information (this is an advanced feature which most users can ignore). :param Device \\*\\*kwargs: The named devices that belong to the composite device. These devices will be accessible as named attributes on the resulting device, and their :attr:`value` attributes will be accessible as named elements of the composite device's tuple :attr:`value`. 
""" def __init__(self, *args, **kwargs): self._all = () self._named = frozendict({}) self._namedtuple = None self._order = kwargs.pop('_order', None) pin_factory = kwargs.pop('pin_factory', None) try: if self._order is None: self._order = sorted(kwargs.keys()) else: for missing_name in set(kwargs.keys()) - set(self._order): raise CompositeDeviceBadOrder( '%s missing from _order' % missing_name) self._order = tuple(self._order) for name in set(self._order) & set(dir(self)): raise CompositeDeviceBadName( '%s is a reserved name' % name) for dev in chain(args, kwargs.values()): if not isinstance(dev, Device): raise CompositeDeviceBadDevice( "%s doesn't inherit from Device" % dev) self._named = frozendict(kwargs) self._namedtuple = namedtuple( '%sValue' % self.__class__.__name__, chain( ('device_%d' % i for i in range(len(args))), self._order)) except: for dev in chain(args, kwargs.values()): if isinstance(dev, Device): dev.close() raise self._all = args + tuple(kwargs[v] for v in self._order) super(CompositeDevice, self).__init__(pin_factory=pin_factory) def __getattr__(self, name): # if _named doesn't exist yet, pretend it's an empty dict if name == '_named': return frozendict({}) try: return self._named[name] except KeyError: raise AttributeError("no such attribute %s" % name) def __setattr__(self, name, value): # make named components read-only properties if name in self._named: raise AttributeError("can't set attribute %s" % name) return super(CompositeDevice, self).__setattr__(name, value) def __repr__(self): try: self._check_open() named = len(self._named) unnamed = len(self) - len(self._named) if named > 0 and unnamed > 0: return "<gpiozero.%s object containing %d devices: %s and %d unnamed>" % ( self.__class__.__name__, len(self), ', '.join(self._order), len(self) - len(self._named) ) elif named > 0: return "<gpiozero.%s object containing %d devices: %s>" % ( self.__class__.__name__, len(self), ', '.join(self._order) ) else: return "<gpiozero.%s object 
containing %d unnamed devices>" % ( self.__class__.__name__, len(self) ) except DeviceClosed: return "<gpiozero.%s object closed>" % (self.__class__.__name__) def __len__(self): return len(self._all) def __getitem__(self, index): return self._all[index] def __iter__(self): return iter(self._all) @property def all(self): # XXX Deprecate this in favour of using the instance as a container return self._all def close(self): if getattr(self, '_all', None): for device in self._all: if isinstance(device, Device): device.close() self._all = () @property def closed(self): return all(device.closed for device in self) @property def namedtuple(self): """ The :func:`~collections.namedtuple` type constructed to represent the value of the composite device. The :attr:`value` attribute returns values of this type. """ return self._namedtuple @property def value(self): """ A :func:`~collections.namedtuple` containing a value for each subordinate device. Devices with names will be represented as named elements. Unnamed devices will have a unique name generated for them, and they will appear in the position they appeared in the constructor. """ return self.namedtuple(*(device.value for device in self)) @property def is_active(self): """ Composite devices are considered "active" if any of their constituent devices have a "truthy" value. """ return any(self.value) class GPIODevice(Device): """ Extends :class:`Device`. Represents a generic GPIO device and provides the services common to all single-pin GPIO devices (like ensuring two GPIO devices do no share a :attr:`pin`). :type pin: int or str :param pin: The GPIO pin that the device is connected to. See :ref:`pin-numbering` for valid pin numbers. If this is :data:`None` a :exc:`GPIODeviceError` will be raised. If the pin is already in use by another device, :exc:`GPIOPinInUse` will be raised. 
""" def __init__(self, pin=None, **kwargs): super(GPIODevice, self).__init__(**kwargs) # self._pin must be set before any possible exceptions can be raised # because it's accessed in __del__. However, it mustn't be given the # value of pin until we've verified that it isn't already allocated self._pin = None if pin is None: raise GPIOPinMissing('No pin given') # Check you can reserve *before* constructing the pin self.pin_factory.reserve_pins(self, pin) pin = self.pin_factory.pin(pin) self._pin = pin self._active_state = True self._inactive_state = False def _state_to_value(self, state): return int(state == self._active_state) def _read(self): try: return self._state_to_value(self.pin.state) except (AttributeError, TypeError): self._check_open() raise def close(self): super(GPIODevice, self).close() if getattr(self, '_pin', None) is not None: self.pin_factory.release_pins(self, self._pin.number) self._pin.close() self._pin = None @property def closed(self): return self._pin is None def _check_open(self): try: super(GPIODevice, self)._check_open() except DeviceClosed as e: # For backwards compatibility; GPIODeviceClosed is deprecated raise GPIODeviceClosed(str(e)) @property def pin(self): """ The :class:`Pin` that the device is connected to. This will be :data:`None` if the device has been closed (see the :meth:`~Device.close` method). When dealing with GPIO pins, query ``pin.number`` to discover the GPIO pin (in BCM numbering) that the device is connected to. 
""" return self._pin @property def value(self): return self._read() def __repr__(self): try: return "<gpiozero.%s object on pin %r, is_active=%s>" % ( self.__class__.__name__, self.pin, self.is_active) except DeviceClosed: return "<gpiozero.%s object closed>" % self.__class__.__name__ def _devices_shutdown(): if Device.pin_factory is not None: with Device.pin_factory._res_lock: reserved_devices = { dev for ref_list in Device.pin_factory._reservations.values() for ref in ref_list for dev in (ref(),) if dev is not None } for dev in reserved_devices: dev.close() Device.pin_factory.close() Device.pin_factory = None def _shutdown(): _threads_shutdown() _devices_shutdown() atexit.register(_shutdown)
# GPIO Zero: a library for controlling the Raspberry Pi's GPIO pins # Copyright (c) 2016-2019 Dave Jones <dave@waveform.org.uk> # Copyright (c) 2019 Ben Nuttall <ben@bennuttall.com> # Copyright (c) 2016 Andrew Scheller <github@loowis.durge.org> # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
from __future__ import ( unicode_literals, absolute_import, print_function, division, ) str = type('') import os import warnings from mock import patch import pytest import errno from gpiozero import * from gpiozero.pins.mock import MockFactory file_not_found = IOError(errno.ENOENT, 'File not found') def test_default_pin_factory_order(): with patch('sys.path') as path, \ patch('io.open') as io, \ patch('os.environ.get') as get: # ensure no pin libraries can be imported path.return_value = [] # ensure /proc/device-tree... is not found when trying native io.return_value.__enter__.side_effect = file_not_found # ensure pin factory not set in env var get.return_value = None with warnings.catch_warnings(record=True) as ws: warnings.resetwarnings() with pytest.raises(BadPinFactory): device = GPIODevice(2) assert len(ws) == 4 assert all(w.category == PinFactoryFallback for w in ws) assert ws[0].message.args[0].startswith('Falling back from rpigpio:') assert ws[1].message.args[0].startswith('Falling back from rpio:') assert ws[2].message.args[0].startswith('Falling back from pigpio:') assert ws[3].message.args[0].startswith('Falling back from native:') def test_device_bad_pin(mock_factory): with pytest.raises(GPIOPinMissing): device = GPIODevice() with pytest.raises(PinInvalidPin): device = GPIODevice(60) with pytest.raises(PinInvalidPin): device = GPIODevice('BCM60') with pytest.raises(PinInvalidPin): device = GPIODevice('WPI32') with pytest.raises(PinInvalidPin): device = GPIODevice(b'P2:2') with pytest.raises(PinInvalidPin): device = GPIODevice('J8:42') with pytest.raises(PinInvalidPin): device = GPIODevice('J8:1') with pytest.raises(PinInvalidPin): device = GPIODevice('foo') def test_device_non_physical(mock_factory): with warnings.catch_warnings(record=True) as w: warnings.resetwarnings() device = GPIODevice('GPIO37') assert len(w) == 1 assert w[0].category == PinNonPhysical def test_device_init(mock_factory): pin = mock_factory.pin(2) with GPIODevice(2) as device: 
assert repr(device).startswith('<gpiozero.GPIODevice object') assert not device.closed assert device.pin is pin with pytest.raises(TypeError): GPIODevice(2, foo='bar') def test_device_init_twice_same_pin(mock_factory): with GPIODevice(2) as device: with pytest.raises(GPIOPinInUse): GPIODevice(2) def test_device_init_twice_same_pin_different_spec(mock_factory): with GPIODevice(2) as device: with pytest.raises(GPIOPinInUse): GPIODevice("BOARD3") def test_device_init_twice_different_pin(mock_factory): with GPIODevice(2) as device: with GPIODevice(3) as device2: pass def test_device_close(mock_factory): device = GPIODevice(2) # Don't use "with" here; we're testing close explicitly device.close() assert device.closed assert device.pin is None def test_device_reopen_same_pin(mock_factory): pin = mock_factory.pin(2) with GPIODevice(2) as device: pass with GPIODevice(2) as device2: assert not device2.closed assert device2.pin is pin assert device.closed assert device.pin is None def test_device_pin_parsing(mock_factory): # MockFactory defaults to a Pi 3B layout pin = mock_factory.pin(2) with GPIODevice('GPIO2') as device: assert device.pin is pin with GPIODevice('BCM2') as device: assert device.pin is pin with GPIODevice('WPI8') as device: assert device.pin is pin with GPIODevice('BOARD3') as device: assert device.pin is pin with GPIODevice('J8:3') as device: assert device.pin is pin def test_device_repr(mock_factory): with GPIODevice(4) as device: assert repr(device) == ( '<gpiozero.GPIODevice object on pin %s, ' 'is_active=False>' % device.pin) def test_device_repr_after_close(mock_factory): with GPIODevice(2) as device: pass assert repr(device) == '<gpiozero.GPIODevice object closed>' def test_device_unknown_attr(mock_factory): with GPIODevice(2) as device: with pytest.raises(AttributeError): device.foo = 1 def test_device_broken_attr(mock_factory): with GPIODevice(2) as device: del device._active_state with pytest.raises(AttributeError): device.value def 
test_device_context_manager(mock_factory): with GPIODevice(2) as device: assert not device.closed assert device.closed def test_composite_device_sequence(mock_factory): with CompositeDevice(InputDevice(4), InputDevice(5)) as device: assert repr(device).startswith('<gpiozero.CompositeDevice object') assert len(device) == 2 assert device[0].pin.number == 4 assert device[1].pin.number == 5 assert device.namedtuple._fields == ('device_0', 'device_1') def test_composite_device_values(mock_factory): with CompositeDevice(InputDevice(4), InputDevice(5)) as device: assert repr(device) == '<gpiozero.CompositeDevice object containing 2 unnamed devices>' assert device.value == (0, 0) assert not device.is_active device[0].pin.drive_high() assert device.value == (1, 0) assert device.is_active def test_composite_device_named(mock_factory): with CompositeDevice( foo=InputDevice(4), bar=InputDevice(5), _order=('foo', 'bar') ) as device: assert repr(device) == '<gpiozero.CompositeDevice object containing 2 devices: foo, bar>' assert device.namedtuple._fields == ('foo', 'bar') assert device.value == (0, 0) assert not device.is_active def test_composite_device_some_named(mock_factory): with CompositeDevice( InputDevice(4), foobar=InputDevice(5), ) as device: assert repr(device) == '<gpiozero.CompositeDevice object containing 2 devices: foobar and 1 unnamed>' assert device.namedtuple._fields == ('device_0', 'foobar') assert device.value == (0, 0) assert not device.is_active def test_composite_device_bad_init(mock_factory): with pytest.raises(ValueError): CompositeDevice(foo=1, bar=2, _order=('foo',)) with pytest.raises(ValueError): CompositeDevice(close=1) with pytest.raises(ValueError): CompositeDevice(2) with pytest.raises(ValueError): CompositeDevice(mock_factory.pin(2)) def test_composite_device_read_only(mock_factory): with CompositeDevice(foo=InputDevice(4), bar=InputDevice(5)) as device: with pytest.raises(AttributeError): device.foo = 1 def test_shutdown(mock_factory): from 
gpiozero.devices import _shutdown ds = DistanceSensor(17, 19) f = Device.pin_factory _shutdown() assert ds.closed assert not f.pins assert Device.pin_factory is None # Shutdown must be idempotent _shutdown()
RPi-Distro/python-gpiozero
tests/test_devices.py
gpiozero/devices.py
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin import logging import re from flexget.utils.titles.parser import TitleParser from flexget.utils import qualities from flexget.utils.tools import str_to_int from datetime import datetime log = logging.getLogger('movieparser') def diff_pos(string1, string2): """Returns first position where string1 and string2 differ.""" for (count, c) in enumerate(string1): if len(string2) <= count: return count if string2[count] != c: return count class MovieParser(TitleParser): def __init__(self): self.data = None self.reset() TitleParser.__init__(self) @property def fields(self): """ Return a dict of all parser fields """ return { 'movie_parser': self, 'movie_name': self.name, 'movie_year': self.year, 'proper': self.proper, 'proper_count': self.proper_count } @property def valid(self): return True @property def proper(self): return self.proper_count > 0 @property def is_series(self): return False @property def is_movie(self): return True def reset(self): # parsing results self.name = None self.year = None self.year_pos = None self.quality = qualities.Quality() self.proper_count = 0 def __str__(self): return "<MovieParser(name=%s,year=%s,quality=%s)>" % (self.name, self.year, self.quality) def parse(self, data=None): """Parse movie name. Populates name, year, quality and proper_count attributes""" # Reset before parsing, so the parser can be reused. self.reset() if data is None: data = self.data # Move anything in leading brackets to the end data = re.sub(r'^\[(.*?)\](.*)', r'\2 \1', data) for char in '[]()_,.': data = data.replace(char, ' ') # if there are no spaces if data.find(' ') == -1: data = data.replace('-', ' ') # remove unwanted words (imax, ..) 
self.remove_words(data, self.remove) data = self.strip_spaces(data) # split to parts parts = data.split(' ') cut_part = 256 all_caps = True for part_pos, part in enumerate(parts): cut = False # Don't let the first word be cutoff word if part_pos < 1: continue # check for year num = str_to_int(part) if num is not None: if 1930 < num <= datetime.now().year: if self.year_pos == cut_part: # Looks like a year, but we already set the cutpoint to a year, let's move it forward cut_part = part_pos self.year = num self.year_pos = part_pos cut = True # Don't consider all caps words cut words if the whole title has been all caps if not part.isupper(): all_caps = False # if length > 3 and whole word in uppers, consider as cut word (most likely a group name) if len(part) > 3 and part.isupper() and part.isalpha() and not all_caps: cut = True # check for cutoff words if part.lower() in self.cutoffs: cut = True # check for propers if part.lower() in self.propers: # 'real' and 'final' are too common in movie titles, only cut if it comes after year if part.lower() not in ['real', 'final'] or self.year: self.proper_count += 1 cut = True # update cut position if cut and parts.index(part) < cut_part: cut_part = part_pos if cut_part != 256: log.debug('parts: %s, cut is: %s', parts, parts[cut_part]) # calculate cut positon from cut_part abs_cut = len(' '.join(parts[:cut_part])) log.debug('after parts check, cut data would be: `%s` abs_cut: %i', data[:abs_cut], abs_cut) # parse quality quality = qualities.Quality(data) if quality: self.quality = quality # remaining string is same as data but quality information removed # find out position where there is first difference, this is earliest # quality bit, anything after that has no relevance to the movie name dp = diff_pos(data, quality.clean_text) if dp is not None: log.debug('quality start: %s', dp) if dp < abs_cut: log.debug('quality cut is even shorter') abs_cut = dp # make cut data = data[:abs_cut].strip() log.debug('data cut to `%s` - 
this will be the name', data) # save results self.name = data
# --- FlexGet series API test module (source formatting is mangled: the ---
# --- original newlines were collapsed; code below is kept byte-identical ---
# --- and comments are added only on their own lines at statement breaks) ---
# TestSeriesRootAPI covers GET /series/ and its query parameters: an empty
# DB returns [], a series attached to a SeriesTask counts as "configured",
# and `in_config=unconfigured|all` widens the filter. `premieres=true`
# keeps only shows whose episode is an S01E01-style premiere, and
# `status=new|stale` filters on how recently a release was first seen.
# Every response body is validated against the plugin's JSON schemas via
# the schema_match fixture; api_client is the project's test-client fixture.
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin import pytest from datetime import datetime, timedelta from flexget.plugins.filter.seen import SeenEntry from flexget.api.app import base_message from flexget.api.plugins.series import ObjectsContainer as OC from flexget.api.plugins.tvdb_lookup import ObjectsContainer as tvdb from flexget.api.plugins.tvmaze_lookup import ObjectsContainer as tvmaze from flexget.manager import Session from flexget.plugins.filter.series import Series, SeriesTask, Episode, EpisodeRelease, AlternateNames from flexget.utils import json class TestSeriesRootAPI(object): config = """ tasks: {} """ def test_series_root_get(self, api_client, schema_match): # No params rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert data == [] with Session() as session: series = Series() series.name = 'test series' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors show = data[0] errors = schema_match(OC.single_series_object, show) assert not errors assert show['name'] == 'test series' def test_series_configured_param(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series' session.add(series) # Default is configured series, no results rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert data == [] # Get unconfigured series rsp = 
api_client.get('/series/?in_config=unconfigured') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors show = data[0] errors = schema_match(OC.single_series_object, show) assert not errors assert len(data) == 1 assert show['name'] == 'test series' # Add a configured series with Session() as session: series = Series() series.name = 'test series 2' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] # Get only configures series rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors show = data[0] errors = schema_match(OC.single_series_object, show) assert not errors assert len(data) == 1 assert show['name'] == 'test series 2' # Get all series rsp = api_client.get('/series/?in_config=all') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 2 def test_series_premieres_param(self, api_client, schema_match): # Add a series with an episode of S02E05, not a premiere with Session() as session: series = Series() series.name = 'test series' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode = Episode() episode.identifier = 'S02E05' episode.identified_by = 'ep' episode.season = 2 episode.number = 5 episode.series_id = series.id series.episodes.append(episode) release = EpisodeRelease() release.title = 'test release' release.downloaded = True episode.releases = [release] # Default all, not just premieres rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = 
schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 # Get only premieres rsp = api_client.get('/series/?premieres=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 0 # Add a premiere episode to another series with Session() as session: series = Series() series.name = 'test series 2' session.add(series) task = SeriesTask('test task 2') series.in_tasks = [task] episode = Episode() episode.identifier = 'S01E01' episode.identified_by = 'ep' episode.season = 1 episode.number = 1 series.episodes.append(episode) release = EpisodeRelease() release.title = 'test release 2' release.downloaded = True episode.releases = [release] # Get only just premieres rsp = api_client.get('/series/?premieres=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 def test_series_status_param(self, api_client, schema_match): # Add an episode with a release created now with Session() as session: series = Series() series.name = 'test series' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode = Episode() episode.identifier = 'S02E05' episode.identified_by = 'ep' episode.season = 2 episode.number = 5 episode.series_id = series.id series.episodes.append(episode) release = EpisodeRelease() release.title = 'test release' release.downloaded = True episode.releases = [release] # Add an episode with a release created 8 days ago with Session() as session: series = Series() series.name = 'test series 2' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode = Episode() episode.identifier = 'S01E01' episode.identified_by = 'ep' episode.season = 1 episode.number = 1 episode.series_id = series.id 
# Continuation of TestSeriesRootAPI.test_series_status_param: releases first
# seen 8 days and 366 days ago distinguish `status=new` (with the `days=`
# override) from `status=stale`. test_series_lookup_param (marked online)
# checks `lookup=tvdb&lookup=tvmaze` embeds both lookup payloads, validated
# against the tvdb/tvmaze schemas. test_series_post drives POST /series/:
# 201 on create, 409 on a duplicate name or a reused alternate name, 422 on
# an invalid begin_episode. Then follow: TestSeriesSearchAPI (substring
# match via GET /series/search/<term>/), TestSeriesSingleAPI (GET/DELETE/PUT
# /series/<id>/ incl. 404 for an unknown id and 409 for a conflicting
# alternate name), TestSeriesEpisodesAPI (list and bulk-delete a show's
# episodes), TestSeriesEpisodeAPI (single-episode GET/DELETE; 400 when the
# episode belongs to a different series) and the start of
# TestSeriesReleasesAPI (per-episode release listing with `downloaded=`).
series.episodes.append(episode) release = EpisodeRelease() release.title = 'test release 2' release.downloaded = True release.first_seen = datetime.now() - timedelta(days=8) episode.releases = [release] # Default all, not just status = new rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 2 # Just new rsp = api_client.get('/series/?status=new') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 # New with days param rsp = api_client.get('/series/?status=new&days=9') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 2 # Just stale rsp = api_client.get('/series/?status=stale') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 0 # Add an episode with a release created over a year ago with Session() as session: series = Series() series.name = 'test series 3' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode = Episode() episode.identifier = 'S01E01' episode.identified_by = 'ep' episode.season = 1 episode.number = 1 episode.series_id = series.id series.episodes.append(episode) release = EpisodeRelease() release.title = 'test release 3' release.downloaded = True release.first_seen = datetime.now() - timedelta(days=366) episode.releases = [release] # Just stale rsp = api_client.get('/series/?status=stale') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = 
json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 @pytest.mark.online def test_series_lookup_param(self, api_client, schema_match): # Add two real shows with Session() as session: series = Series() series.name = 'Suits' session.add(series) series2 = Series() series2.name = 'Stranger Things' session.add(series2) rsp = api_client.get('/series/?in_config=all&lookup=tvdb&lookup=tvmaze') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 2 for show in data: tvdb_lookup = show['lookup']['tvdb'] assert tvdb_lookup errors = schema_match(tvdb.tvdb_series_object, tvdb_lookup) assert not errors tvmaze_lookup = show['lookup']['tvmaze'] assert tvmaze_lookup errors = schema_match(tvmaze.tvmaze_series_object, tvmaze_lookup) assert not errors def test_series_post(self, api_client, schema_match): payload = {'name': 'test series'} # Minimal payload rsp = api_client.json_post('/series/', data=json.dumps(payload)) assert rsp.status_code == 201, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.single_series_object, data) assert not errors # Try to add again rsp = api_client.json_post('/series/', data=json.dumps(payload)) assert rsp.status_code == 409, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors payload2 = {'name': 'test series 2', 'begin_episode': 'bla'} # Invalid begin episode rsp = api_client.json_post('/series/', data=json.dumps(payload2)) assert rsp.status_code == 422, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors payload3 = {'name': 'test series 2', 'begin_episode': 's01e01', 
'alternate_names': [ 'show1', 'show2' ]} # Maximal payload rsp = api_client.json_post('/series/', data=json.dumps(payload3)) assert rsp.status_code == 201, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.single_series_object, data) assert not errors assert data['name'] == payload3['name'] assert data['alternate_names'] == payload3['alternate_names'] assert data['begin_episode']['identifier'].lower() == payload3['begin_episode'] payload4 = {'name': 'test series 3', 'alternate_names': ['show1']} # Alternate name already added to different show rsp = api_client.json_post('/series/', data=json.dumps(payload4)) assert rsp.status_code == 409, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors class TestSeriesSearchAPI(object): config = """ tasks: {} """ def test_series_search(self, api_client, schema_match): with Session() as session: series1 = Series() series1.name = 'test series1' session.add(series1) series2 = Series() series2.name = 'test series2' session.add(series2) rsp = api_client.get('/series/search/test/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 2 rsp = api_client.get('/series/search/series1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 rsp = api_client.get('/series/search/bla/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 0 class TestSeriesSingleAPI(object): config = """ tasks: {} """ def test_series_get(self, api_client, 
schema_match): with Session() as session: series1 = Series() series1.name = 'test series1' session.add(series1) rsp = api_client.get('/series/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.single_series_object, data) assert not errors assert data['name'] == 'test series1' # No existing ID rsp = api_client.get('/series/10/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_series_delete(self, api_client, schema_match): with Session() as session: series1 = Series() series1.name = 'test series1' session.add(series1) rsp = api_client.delete('/series/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Delete again, no existing ID rsp = api_client.delete('/series/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_series_put(self, api_client, schema_match): with Session() as session: series1 = Series() series1.name = 'test series1' session.add(series1) payload = {} # Validation error rsp = api_client.json_put('/series/1/', data=json.dumps(payload)) assert rsp.status_code == 422, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors payload1 = {'begin_episode': 's01e01', 'alternate_names': ['show1']} rsp = api_client.json_put('/series/1/', data=json.dumps(payload1)) assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.single_series_object, data) assert not errors assert 
data['begin_episode']['identifier'].lower() == payload1['begin_episode'] assert data['alternate_names'] == payload1['alternate_names'] with Session() as session: series = Series() series.name = 'test series2' session.add(series) alt = AlternateNames('show2') series.alternate_names = [alt] payload2 = {'alternate_names': ['show2']} # Alternate name used by another show rsp = api_client.json_put('/series/1/', data=json.dumps(payload2)) assert rsp.status_code == 409, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Non existent show rsp = api_client.json_put('/series/10/', data=json.dumps(payload2)) assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors class TestSeriesEpisodesAPI(object): config = """ tasks: {} """ def test_episodes_get(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id episode2 = Episode() episode2.identifier = 'S01E01' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 1 episode2.series_id = series.id release = EpisodeRelease() release.title = 'test release' release.downloaded = True episode1.releases = [release] series.episodes.append(episode1) series.episodes.append(episode2) # No series rsp = api_client.get('/series/10/episodes/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = 
json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_list_schema, data) assert not errors assert len(data) == 2 # Delete all episodes rsp = api_client.delete('/series/1/episodes/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_list_schema, data) assert not errors assert len(data) == 0 class TestSeriesEpisodeAPI(object): config = """ tasks: {} """ def test_episode(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) task = SeriesTask('test task') series.in_tasks = [task] episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id episode2 = Episode() episode2.identifier = 'S01E01' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 1 episode2.series_id = series.id release = EpisodeRelease() release.title = 'test release' release.downloaded = True episode1.releases = [release] series.episodes.append(episode1) series.episodes.append(episode2) series2 = Series() series2.name = 'test series 2' session.add(series2) rsp = api_client.get('/series/1/episodes/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_object, data) assert not errors assert data['identifier'] == 'S01E01' assert data['identified_by'] == 'ep' assert data['season'] == 1 assert data['number'] == 1 assert data['premiere'] == 'Series Premiere' # No series ID rsp = api_client.get('/series/10/episodes/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = 
json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode ID rsp = api_client.get('/series/1/episodes/10/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.get('/series/2/episodes/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.delete('/series/2/episodes/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Delete rsp = api_client.delete('/series/1/episodes/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.delete('/series/1/episodes/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.delete('/series/10/episodes/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors class TestSeriesReleasesAPI(object): config = """ tasks: {} """ def test_releases_get(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = 
Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False episode1.releases = [release1, release2] series.episodes.append(episode1) series2 = Series() series2.name = 'test series 2' session.add(series2) episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series2.id series2.episodes.append(episode2) rsp = api_client.get('/series/1/episodes/1/releases/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 2 # Just downloaded releases rsp = api_client.get('/series/1/episodes/1/releases/?downloaded=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 1 assert data[0]['title'] == 'downloaded release' # Just un-downloaded releases rsp = api_client.get('/series/1/episodes/1/releases/?downloaded=false') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 1 assert data[0]['title'] == 'un-downloaded release' # No series rsp = api_client.get('/series/10/episodes/1/releases/?downloaded=false') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = 
api_client.get('/series/1/episodes/10/releases/?downloaded=false') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.get('/series/2/episodes/1/releases/?downloaded=false') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_releases_delete(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False episode1.releases = [release1, release2] series.episodes.append(episode1) series2 = Series() series2.name = 'test series 2' session.add(series2) episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series2.id series2.episodes.append(episode2) rsp = api_client.delete('/series/1/episodes/1/releases/?downloaded=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/1/releases/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 1 assert data[0]['title'] == 'un-downloaded release' rsp = api_client.delete('/series/1/episodes/1/releases/?downloaded=false') 
# TestSeriesReleasesAPI continued: DELETE on an episode's releases honours
# the `downloaded` flag and leaves the other releases intact, with
# 404 for an unknown series/episode and 400 when the episode belongs to a
# different series. PUT /series/<id>/episodes/<id>/releases/ resets every
# release of the episode to un-downloaded (the downloaded=true list goes to
# 0, downloaded=false to 2). TestSeriesReleaseAPI then drives the
# single-release endpoints: GET returns the release with its `downloaded`
# state and title, PUT marks one release un-downloaded and returns 400 when
# it is already un-downloaded, with the same 404/400 ownership checks for
# series, episode and release ids (release 3 belongs to another episode).
assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/1/releases/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 0 # No series rsp = api_client.delete('/series/10/episodes/1/releases/?downloaded=false') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = api_client.delete('/series/1/episodes/10/releases/?downloaded=false') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.delete('/series/2/episodes/1/releases/?downloaded=false') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_releases_put(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False episode1.releases = [release1, release2] series.episodes.append(episode1) series2 = Series() series2.name = 'test series 2' session.add(series2) episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' 
episode2.season = 1 episode2.number = 2 episode2.series_id = series2.id series2.episodes.append(episode2) rsp = api_client.json_put('/series/1/episodes/1/releases/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/1/releases/?downloaded=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 0 rsp = api_client.get('/series/1/episodes/1/releases/?downloaded=false') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_list_schema, data) assert not errors assert len(data) == 2 # No series rsp = api_client.json_put('/series/10/episodes/1/releases/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = api_client.json_put('/series/1/episodes/10/releases/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.json_put('/series/2/episodes/1/releases/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors class TestSeriesReleaseAPI(object): config = """ tasks: {} """ def test_release_get(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season 
= 1 episode1.number = 1 episode1.series_id = series.id episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False release3 = EpisodeRelease() release3.title = 'downloaded release' release3.downloaded = True episode1.releases = [release1, release2] episode2.releases = [release3] series.episodes.append(episode1) series.episodes.append(episode2) series2 = Series() series2.name = 'test series 2' session.add(series2) episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series2.id series2.episodes.append(episode2) rsp = api_client.get('/series/1/episodes/1/releases/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_object, data) assert not errors assert data['downloaded'] is True assert data['title'] == 'downloaded release' rsp = api_client.get('/series/1/episodes/1/releases/2/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_object, data) assert not errors assert data['downloaded'] is False assert data['title'] == 'un-downloaded release' # No series rsp = api_client.get('/series/10/episodes/1/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = api_client.get('/series/1/episodes/10/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = 
schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.get('/series/2/episodes/1/releases/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No release rsp = api_client.get('/series/1/episodes/1/releases/10/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Release does not belong to episode rsp = api_client.get('/series/1/episodes/1/releases/3/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_release_put(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = Episode() episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False release3 = EpisodeRelease() release3.title = 'downloaded release' release3.downloaded = True episode1.releases = [release1, release2] episode2.releases = [release3] series.episodes.append(episode1) series.episodes.append(episode2) series2 = Series() series2.name = 'test series 2' session.add(series2) episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series2.id series2.episodes.append(episode2) # No series rsp = 
api_client.json_put('/series/10/episodes/1/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.json_put('/series/1/episodes/1/releases/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_object, data) assert not errors # Cannot reset if already downloaded rsp = api_client.json_put('/series/1/episodes/1/releases/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = api_client.json_put('/series/1/episodes/10/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.json_put('/series/2/episodes/1/releases/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No release rsp = api_client.json_put('/series/1/episodes/1/releases/10/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Release does not belong to episode rsp = api_client.json_put('/series/1/episodes/1/releases/3/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors def test_release_delete(self, api_client, schema_match): with Session() as session: series = Series() series.name = 'test series 1' session.add(series) episode1 = Episode() 
episode1.identifier = 'S01E01' episode1.identified_by = 'ep' episode1.season = 1 episode1.number = 1 episode1.series_id = series.id episode2 = Episode() episode2.identifier = 'S01E02' episode2.identified_by = 'ep' episode2.season = 1 episode2.number = 2 episode2.series_id = series.id release1 = EpisodeRelease() release1.title = 'downloaded release' release1.downloaded = True release2 = EpisodeRelease() release2.title = 'un-downloaded release' release2.downloaded = False release3 = EpisodeRelease() release3.title = 'downloaded release' release3.downloaded = True episode1.releases = [release1, release2] episode2.releases = [release3] series.episodes.append(episode1) series.episodes.append(episode2) series2 = Series() series2.name = 'test series 2' session.add(series2) episode3 = Episode() episode3.identifier = 'S01E02' episode3.identified_by = 'ep' episode3.season = 1 episode3.number = 2 episode3.series_id = series2.id release4 = EpisodeRelease() release4.title = 'downloaded release' release4.downloaded = True episode3.releases = [release4] series2.episodes.append(episode3) # No series rsp = api_client.delete('/series/10/episodes/1/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No episode for series rsp = api_client.delete('/series/1/episodes/10/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Episode does not belong to series rsp = api_client.delete('/series/2/episodes/1/releases/1/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # No release rsp = api_client.delete('/series/1/episodes/1/releases/10/') assert rsp.status_code == 404, 'Response code is 
%s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Release does not belong to episode rsp = api_client.delete('/series/1/episodes/1/releases/3/') assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.delete('/series/1/episodes/1/releases/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Already deleted rsp = api_client.delete('/series/1/episodes/1/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors class TestSeriesForgetFlag(object): config = """ tasks: series_data: mock: - {title: 'series.foo.s01e01.720p.hdtv-flexget'} - {title: 'series.foo.s01e01.1080p.hdtv-flexget'} - {title: 'series.foo.s01e02.720p.hdtv-flexget'} - {title: 'series.foo.s01e02.1080p.hdtv-flexget'} series: - series foo: qualities: - 720p - 1080p """ def test_delete_series_with_forget_flag(self, execute_task, api_client, schema_match): task = execute_task('series_data') assert len(task.accepted) == 4 # Get series rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 1 # Get seen object with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 4 # Delete with forget flag rsp = api_client.delete('/series/1/?forget=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Get series 
rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.series_list_schema, data) assert not errors assert len(data) == 0 # Get seen object with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 0 def test_delete_series_episode_with_forget_flag(self, execute_task, api_client, schema_match): task = execute_task('series_data') assert len(task.accepted) == 4 # Get episode 1 rsp = api_client.get('/series/1/episodes/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_object, data) assert not errors # Get episode 2 rsp = api_client.get('/series/1/episodes/2/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_object, data) assert not errors # Get seen object with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 4 # Delete with forget flag rsp = api_client.delete('/series/1/episodes/1/?forget=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Get episode 1 rsp = api_client.get('/series/1/episodes/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors # Get episode 2 rsp = api_client.get('/series/1/episodes/2/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.episode_object, data) assert not errors # Get seen object with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 2 def 
test_delete_series_release_with_forget_flag(self, execute_task, api_client, schema_match): task = execute_task('series_data') assert len(task.accepted) == 4 # Get release 1 for episode 1 rsp = api_client.get('/series/1/episodes/1/releases/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(OC.release_object, data) assert not errors # Get seen object with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 4 rsp = api_client.delete('/series/1/episodes/1/releases/1/?forget=true') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors rsp = api_client.get('/series/1/episodes/1/releases/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) errors = schema_match(base_message, data) assert not errors with Session() as session: seen = session.query(SeenEntry).all() assert len(seen) == 3 class TestSeriesPagination(object): config = 'tasks: {}' def test_series_pagination(self, api_client, link_headers): number_of_series = 200 with Session() as session: for i in range(number_of_series): series = Series() session.add(series) series.name = 'test series {}'.format(i) task = SeriesTask('test task') series.in_tasks = [task] # Default values rsp = api_client.get('/series/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 50 # Default page size assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 2 # Change page size rsp = api_client.get('/series/?per_page=100') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 100 assert 
int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 100 links = link_headers(rsp) assert links['last']['page'] == 2 assert links['next']['page'] == 2 # Get different page rsp = api_client.get('series/?page=2') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 50 assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 3 assert links['prev']['page'] == 1 def test_episodes_pagination(self, api_client, link_headers): number_of_episodes = 200 with Session() as session: series = Series() session.add(series) series.name = 'test series' task = SeriesTask('test task') series.in_tasks = [task] for i in range(number_of_episodes): episode = Episode() episode.identifier = 'S01E0{}'.format(i) episode.identified_by = 'ep' episode.season = 1 episode.number = i episode.series_id = series.id series.episodes.append(episode) # Default values rsp = api_client.get('/series/1/episodes/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 50 # Default page size assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 assert int(rsp.headers['Series-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 2 # Change page size rsp = api_client.get('/series/1/episodes/?per_page=100') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 100 assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 100 assert int(rsp.headers['Series-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 2 assert links['next']['page'] == 2 # Get different page rsp = api_client.get('series/1/episodes/?page=2') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) 
assert len(data) == 50 assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 assert int(rsp.headers['Series-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 3 assert links['prev']['page'] == 1 def test_releases_pagination(self, api_client, link_headers): number_of_releases = 200 with Session() as session: series = Series() session.add(series) series.name = 'test series' task = SeriesTask('test task') series.in_tasks = [task] episode = Episode() episode.identifier = 'S01E01' episode.identified_by = 'ep' episode.season = 1 episode.number = 1 episode.series_id = series.id series.episodes.append(episode) for i in range(number_of_releases): release = EpisodeRelease() release.title = 'test release {}'.format(i) release.downloaded = True episode.releases.append(release) # Default values rsp = api_client.get('/series/1/episodes/1/releases/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 50 # Default page size assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 assert int(rsp.headers['Series-ID']) == 1 assert int(rsp.headers['Episode-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 2 # Change page size rsp = api_client.get('/series/1/episodes/1/releases/?per_page=100') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 100 assert int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 100 assert int(rsp.headers['Series-ID']) == 1 assert int(rsp.headers['Episode-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 2 assert links['next']['page'] == 2 # Get different page rsp = api_client.get('series/1/episodes/1/releases/?page=2') assert rsp.status_code == 200 data = json.loads(rsp.get_data(as_text=True)) assert len(data) == 50 assert 
int(rsp.headers['total-count']) == 200 assert int(rsp.headers['count']) == 50 assert int(rsp.headers['Series-ID']) == 1 assert int(rsp.headers['Episode-ID']) == 1 links = link_headers(rsp) assert links['last']['page'] == 4 assert links['next']['page'] == 3 assert links['prev']['page'] == 1
qk4l/Flexget
flexget/tests/api_tests/test_series_api.py
flexget/utils/titles/movie.py
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2017-2020 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""A model that proxies access to one or more completion categories."""

from typing import MutableSequence

from PyQt5.QtCore import Qt, QModelIndex, QAbstractItemModel

from qutebrowser.utils import log, qtutils, utils
from qutebrowser.api import cmdutils


class CompletionModel(QAbstractItemModel):

    """A model that proxies access to one or more completion categories.

    Top level indices represent categories.
    Child indices represent rows of those tables.

    The parent/child relationship is encoded via QModelIndex.internalPointer:
    category (top-level) indices carry an empty internal pointer, while item
    indices store a pointer to their parent category model.

    Attributes:
        column_widths: The width percentages of the columns used in the
                       completion view.
        _categories: The sub-categories.
    """

    def __init__(self, *, column_widths=(30, 70, 0), parent=None):
        """Initialize an empty model; categories are added via add_category."""
        super().__init__(parent)
        self.column_widths = column_widths
        self._categories: MutableSequence[QAbstractItemModel] = []

    def _cat_from_idx(self, index):
        """Return the category pointed to by the given index.

        Args:
            idx: A QModelIndex

        Returns:
            A category if the index points at one, else None
        """
        # items hold an index to the parent category in their internalPointer
        # categories have an empty internalPointer
        if index.isValid() and not index.internalPointer():
            return self._categories[index.row()]
        return None

    def add_category(self, cat):
        """Add a completion category to the model."""
        self._categories.append(cat)

    def data(self, index, role=Qt.DisplayRole):
        """Return the item data for index.

        Override QAbstractItemModel::data.

        Args:
            index: The QModelIndex to get item flags for.

        Return: The item data, or None on an invalid index.
        """
        # Only display text is provided; other roles are handled elsewhere.
        if role != Qt.DisplayRole:
            return None
        cat = self._cat_from_idx(index)
        if cat:
            # category header
            if index.column() == 0:
                return self._categories[index.row()].name
            return None
        # item: delegate the lookup to the owning category's own model
        cat = self._cat_from_idx(index.parent())
        if not cat:
            return None
        idx = cat.index(index.row(), index.column())
        return cat.data(idx)

    def flags(self, index):
        """Return the item flags for index.

        Override QAbstractItemModel::flags.

        Return: The item flags, or Qt.NoItemFlags on error.
        """
        if not index.isValid():
            return Qt.NoItemFlags
        if index.parent().isValid():
            # item: selectable; category headers are deliberately inert
            return (Qt.ItemIsEnabled | Qt.ItemIsSelectable |
                    Qt.ItemNeverHasChildren)
        else:
            # category
            return Qt.NoItemFlags

    def index(self, row, col, parent=QModelIndex()):
        """Get an index into the model.

        Override QAbstractItemModel::index.

        Return: A QModelIndex.
        """
        if (row < 0 or row >= self.rowCount(parent) or
                col < 0 or col >= self.columnCount(parent)):
            return QModelIndex()
        if parent.isValid():
            if parent.column() != 0:
                return QModelIndex()
            # store a pointer to the parent category in internalPointer
            return self.createIndex(row, col, self._categories[parent.row()])
        return self.createIndex(row, col, None)

    def parent(self, index):
        """Get an index to the parent of the given index.

        Override QAbstractItemModel::parent.

        Args:
            index: The QModelIndex to get the parent index for.
        """
        parent_cat = index.internalPointer()
        if not parent_cat:
            # categories have no parent
            return QModelIndex()
        # recover the category's row from its position in _categories
        row = self._categories.index(parent_cat)
        return self.createIndex(row, 0, None)

    def rowCount(self, parent=QModelIndex()):
        """Override QAbstractItemModel::rowCount."""
        if not parent.isValid():
            # top-level
            return len(self._categories)
        cat = self._cat_from_idx(parent)
        if not cat or parent.column() != 0:
            # item or nonzero category column (only first col has children)
            return 0
        else:
            # category
            return cat.rowCount()

    def columnCount(self, parent=QModelIndex()):
        """Override QAbstractItemModel::columnCount."""
        # Column count is fixed by the configured widths, not by categories.
        utils.unused(parent)
        return len(self.column_widths)

    def canFetchMore(self, parent):
        """Override to forward the call to the categories."""
        cat = self._cat_from_idx(parent)
        if cat:
            return cat.canFetchMore(QModelIndex())
        return False

    def fetchMore(self, parent):
        """Override to forward the call to the categories."""
        cat = self._cat_from_idx(parent)
        if cat:
            cat.fetchMore(QModelIndex())

    def count(self):
        """Return the count of non-category items."""
        return sum(t.rowCount() for t in self._categories)

    def set_pattern(self, pattern):
        """Set the filter pattern for all categories.

        Args:
            pattern: The filter pattern to set.
        """
        log.completion.debug("Setting completion pattern '{}'".format(pattern))
        # Wrap the per-category filtering in layout-change signals so views
        # attached to this model refresh once, after all categories updated.
        self.layoutAboutToBeChanged.emit()  # type: ignore[attr-defined]
        for cat in self._categories:
            cat.set_pattern(pattern)
        self.layoutChanged.emit()  # type: ignore[attr-defined]

    def first_item(self):
        """Return the index of the first child (non-category) in the model."""
        for row, cat in enumerate(self._categories):
            if cat.rowCount() > 0:
                parent = self.index(row, 0)
                index = self.index(0, 0, parent)
                qtutils.ensure_valid(index)
                return index
        return QModelIndex()

    def last_item(self):
        """Return the index of the last child (non-category) in the model."""
        for row, cat in reversed(list(enumerate(self._categories))):
            childcount = cat.rowCount()
            if childcount > 0:
                parent = self.index(row, 0)
                index = self.index(childcount - 1, 0, parent)
                qtutils.ensure_valid(index)
                return index
        return QModelIndex()

    def columns_to_filter(self, index):
        """Return the column indices the filter pattern applies to.

        Args:
            index: index of the item to check.

        Return: A list of integers.
        """
        cat = self._cat_from_idx(index.parent())
        return cat.columns_to_filter if cat else []

    def delete_cur_item(self, index):
        """Delete the row at the given index."""
        qtutils.ensure_valid(index)
        parent = index.parent()
        cat = self._cat_from_idx(parent)
        assert cat, "CompletionView sent invalid index for deletion"
        if not cat.delete_func:
            raise cmdutils.CommandError("Cannot delete this item.")
        # Collect the full row's data before removal so delete_func can act
        # on it (e.g. remove the corresponding history/bookmark entry).
        data = [cat.data(cat.index(index.row(), i))
                for i in range(cat.columnCount())]
        cat.delete_func(data)
        self.beginRemoveRows(parent, index.row(), index.row())
        cat.removeRow(index.row(), QModelIndex())
        self.endRemoveRows()
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2018-2020 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""Tests for the completion delegate's pattern highlighter."""

from unittest import mock

import pytest
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QTextDocument, QColor
from PyQt5.QtWidgets import QTextEdit

from qutebrowser.completion import completiondelegate


# Each case is (pattern, text, expected (start, length) highlight segments).
@pytest.mark.parametrize('pat,txt,segments', [
    ('foo', 'foo', [(0, 3)]),
    ('foo', 'foobar', [(0, 3)]),
    ('foo', 'FOObar', [(0, 3)]),  # re.IGNORECASE
    ('foo', 'barfoo', [(3, 3)]),
    ('foo', 'barfoobaz', [(3, 3)]),
    ('foo', 'barfoobazfoo', [(3, 3), (9, 3)]),
    ('foo', 'foofoo', [(0, 3), (3, 3)]),
    ('a b', 'cadb', [(1, 1), (3, 1)]),
    ('foo', '<foo>', [(1, 3)]),
    ('<a>', "<a>bc", [(0, 3)]),

    # https://github.com/qutebrowser/qutebrowser/issues/4199
    ('foo', "'foo'", [(1, 3)]),
    ('x', "'x'", [(1, 1)]),
    ('lt', "<lt", [(1, 2)]),

    # See https://github.com/qutebrowser/qutebrowser/pull/5111
    ('bar', '\U0001d65b\U0001d664\U0001d664bar', [(6, 3)]),
    ('an anomaly', 'an anomaly', [(0, 2), (3, 7)]),
])
def test_highlight(pat, txt, segments):
    # Mock out setFormat so we can inspect exactly which spans the
    # highlighter would have formatted, without rendering anything.
    doc = QTextDocument(txt)
    highlighter = completiondelegate._Highlighter(doc, pat, Qt.red)
    highlighter.setFormat = mock.Mock()
    highlighter.highlightBlock(txt)
    highlighter.setFormat.assert_has_calls([
        mock.call(s[0], s[1], mock.ANY)
        for s in segments
    ])


def test_benchmark_highlight(benchmark):
    # Pattern with multiple terms over text containing both, to exercise
    # the common multi-word highlighting path.
    txt = 'boofoobar'
    pat = 'foo bar'
    doc = QTextDocument(txt)

    def bench():
        highlighter = completiondelegate._Highlighter(doc, pat, Qt.red)
        highlighter.highlightBlock(txt)

    benchmark(bench)


def test_highlighted(qtbot):
    """Make sure highlighting works.

    Note that with Qt > 5.12.1 we need to call setPlainText *after*
    creating the highlighter for highlighting to work. Ideally, we'd test
    whether CompletionItemDelegate._get_textdoc() works properly, but testing
    that is kind of hard, so we just test it in isolation here.
    """
    doc = QTextDocument()
    completiondelegate._Highlighter(doc, 'Hello', Qt.red)
    doc.setPlainText('Hello World')

    # Needed so the highlighting actually works.
    edit = QTextEdit()
    qtbot.addWidget(edit)
    edit.setDocument(doc)

    # If highlighting fired, at least one character format carries the
    # red foreground we configured above.
    colors = [f.foreground().color() for f in doc.allFormats()]
    assert QColor('red') in colors
The-Compiler/qutebrowser
tests/unit/completion/test_completiondelegate.py
qutebrowser/completion/models/completionmodel.py
# Authors: Adam Li <adam2392@gmail.com>
#          Alex Rockhill <aprockhill@mailbox.org>
# License: BSD Style.

from functools import partial

from ...utils import verbose
from ..utils import (has_dataset, _data_path, _data_path_doc, _get_version,
                     _version_doc)

# Identifier of this dataset in the shared dataset registry; used for every
# lookup below so the name only appears once.
_DATASET_NAME = 'epilepsy_ecog'

# Callable returning True when the epilepsy ECoG dataset is present locally.
has_epilepsy_ecog_data = partial(has_dataset, name=_DATASET_NAME)


@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
              verbose=None):  # noqa: D103
    # Thin wrapper around the shared fetcher; the public docstring is
    # generated from the common template right after the definition.
    return _data_path(name=_DATASET_NAME, path=path,
                      force_update=force_update, update_path=update_path,
                      download=download)


data_path.__doc__ = _data_path_doc.format(
    name=_DATASET_NAME, conf='MNE_DATASETS_EPILEPSY_ECOG_PATH')


def get_version():  # noqa: D103
    # Report the locally-installed dataset version via the shared helper.
    return _get_version(_DATASET_NAME)


get_version.__doc__ = _version_doc.format(name=_DATASET_NAME)
import os.path as op import gc import pytest import numpy as np from numpy.testing import (assert_array_almost_equal, assert_equal, assert_array_equal, assert_allclose) from mne.datasets import testing from mne import (read_forward_solution, apply_forward, apply_forward_raw, average_forward_solutions, write_forward_solution, convert_forward_solution, SourceEstimate, pick_types_forward, read_evokeds, VectorSourceEstimate) from mne.io import read_info from mne.label import read_label from mne.utils import requires_mne, run_subprocess from mne.forward import (restrict_forward_to_stc, restrict_forward_to_label, Forward, is_fixed_orient, compute_orient_prior, compute_depth_prior) from mne.channels import equalize_channels data_path = testing.data_path(download=False) fname_meeg = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif') fname_meeg_grad = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-2-grad-fwd.fif') fname_evoked = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data', 'test-ave.fif') def assert_forward_allclose(f1, f2, rtol=1e-7): """Compare two potentially converted forward solutions.""" assert_allclose(f1['sol']['data'], f2['sol']['data'], rtol=rtol) assert f1['sol']['ncol'] == f2['sol']['ncol'] assert f1['sol']['ncol'] == f1['sol']['data'].shape[1] assert_allclose(f1['source_nn'], f2['source_nn'], rtol=rtol) if f1['sol_grad'] is not None: assert (f2['sol_grad'] is not None) assert_allclose(f1['sol_grad']['data'], f2['sol_grad']['data']) assert f1['sol_grad']['ncol'] == f2['sol_grad']['ncol'] assert f1['sol_grad']['ncol'] == f1['sol_grad']['data'].shape[1] else: assert (f2['sol_grad'] is None) assert f1['source_ori'] == f2['source_ori'] assert f1['surf_ori'] == f2['surf_ori'] assert f1['src'][0]['coord_frame'] == f1['src'][0]['coord_frame'] @testing.requires_testing_data def test_convert_forward(): """Test converting forward solution between different representations.""" fwd = 
read_forward_solution(fname_meeg_grad) fwd_repr = repr(fwd) assert ('306' in fwd_repr) assert ('60' in fwd_repr) assert (fwd_repr) assert (isinstance(fwd, Forward)) # look at surface orientation fwd_surf = convert_forward_solution(fwd, surf_ori=True) # go back fwd_new = convert_forward_solution(fwd_surf, surf_ori=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd_new gc.collect() # now go to fixed fwd_fixed = convert_forward_solution(fwd_surf, surf_ori=True, force_fixed=True, use_cps=False) del fwd_surf gc.collect() assert (repr(fwd_fixed)) assert (isinstance(fwd_fixed, Forward)) assert (is_fixed_orient(fwd_fixed)) # now go back to cartesian (original condition) fwd_new = convert_forward_solution(fwd_fixed, surf_ori=False, force_fixed=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd, fwd_new, fwd_fixed gc.collect() @pytest.mark.slowtest @testing.requires_testing_data def test_io_forward(tmpdir): """Test IO for forward solutions.""" # do extensive tests with MEEG + grad n_channels, n_src = 366, 108 fwd = read_forward_solution(fname_meeg_grad) assert (isinstance(fwd, Forward)) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd['sol']['row_names']), n_channels) fname_temp = tmpdir.join('test-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True) leadfield = fwd_read['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd_read['sol']['row_names']), n_channels) 
assert_equal(len(fwd_read['info']['chs']), n_channels) assert ('dev_head_t' in fwd_read['info']) assert ('mri_head_t' in fwd_read) assert_array_almost_equal(fwd['sol']['data'], fwd_read['sol']['data']) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=False) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=False) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, 1494 / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) 
fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) # test warnings on bad filenames fwd = read_forward_solution(fname_meeg_grad) fwd_badname = tmpdir.join('test-bad-name.fif.gz') with pytest.warns(RuntimeWarning, match='end with'): write_forward_solution(fwd_badname, fwd) with pytest.warns(RuntimeWarning, match='end with'): read_forward_solution(fwd_badname) fwd = read_forward_solution(fname_meeg) write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) assert_forward_allclose(fwd, fwd_read) @testing.requires_testing_data def test_apply_forward(): """Test projection of source space data to sensor space.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) assert isinstance(fwd, Forward) vertno = [fwd['src'][0]['vertno'], fwd['src'][1]['vertno']] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) gain_sum = np.sum(fwd['sol']['data'], axis=1) # Evoked evoked = read_evokeds(fname_evoked, condition=0) evoked.pick_types(meg=True) with pytest.warns(RuntimeWarning, match='only .* positive values'): evoked = apply_forward(fwd, stc, evoked.info, start=start, stop=stop) data = evoked.data times = evoked.times # do some tests assert_array_almost_equal(evoked.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) assert_array_almost_equal(times[0], t_start) assert_array_almost_equal(times[-1], t_start + (n_times - 1) / sfreq) # vector stc_vec = VectorSourceEstimate( fwd['source_nn'][:, :, np.newaxis] * 
stc.data[:, np.newaxis], stc.vertices, stc.tmin, stc.tstep) with pytest.warns(RuntimeWarning, match='very large'): evoked_2 = apply_forward(fwd, stc_vec, evoked.info) assert np.abs(evoked_2.data).mean() > 1e-5 assert_allclose(evoked.data, evoked_2.data, atol=1e-10) # Raw with pytest.warns(RuntimeWarning, match='only .* positive values'): raw_proj = apply_forward_raw(fwd, stc, evoked.info, start=start, stop=stop) data, times = raw_proj[:, :] # do some tests assert_array_almost_equal(raw_proj.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) atol = 1. / sfreq assert_allclose(raw_proj.first_samp / sfreq, t_start, atol=atol) assert_allclose(raw_proj.last_samp / sfreq, t_start + (n_times - 1) / sfreq, atol=atol) @testing.requires_testing_data def test_restrict_forward_to_stc(tmpdir): """Test restriction of source space to source SourceEstimate.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert (isinstance(fwd_out, Forward)) assert_equal(fwd_out['sol']['ncol'], 20) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=False) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = 
SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert_equal(fwd_out['sol']['ncol'], 60) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. fname_copy = tmpdir.join('copy-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) fwd_out_read = convert_forward_solution(fwd_out_read, surf_ori=True, force_fixed=False) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data def test_restrict_forward_to_label(tmpdir): """Test restriction of source space to label.""" fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], len(src_sel_lh) + len(src_sel_rh)) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], 
len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) fwd = read_forward_solution(fname_meeg) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], 3 * (len(src_sel_lh) + len(src_sel_rh))) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. 
fname_copy = tmpdir.join('copy-fwd.fif') write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data @requires_mne def test_average_forward_solution(tmpdir): """Test averaging forward solutions.""" fwd = read_forward_solution(fname_meeg) # input not a list pytest.raises(TypeError, average_forward_solutions, 1) # list is too short pytest.raises(ValueError, average_forward_solutions, []) # negative weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [-1, 0]) # all zero weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0]) # weights not same length pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0, 0]) # list does not only have all dict() pytest.raises(TypeError, average_forward_solutions, [1, fwd]) # try an easy case fwd_copy = average_forward_solutions([fwd]) assert (isinstance(fwd_copy, Forward)) assert_array_equal(fwd['sol']['data'], fwd_copy['sol']['data']) # modify a fwd solution, save it, use MNE to average with old one fwd_copy['sol']['data'] *= 0.5 fname_copy = str(tmpdir.join('copy-fwd.fif')) write_forward_solution(fname_copy, fwd_copy, overwrite=True) cmd = ('mne_average_forward_solutions', '--fwd', fname_meeg, '--fwd', fname_copy, '--out', fname_copy) run_subprocess(cmd) # now let's actually do it, with one filename and one fwd fwd_ave = average_forward_solutions([fwd, fwd_copy]) assert_array_equal(0.75 * fwd['sol']['data'], fwd_ave['sol']['data']) # fwd_ave_mne = read_forward_solution(fname_copy) # assert_array_equal(fwd_ave_mne['sol']['data'], fwd_ave['sol']['data']) # with gradient fwd = read_forward_solution(fname_meeg_grad) fwd_ave = average_forward_solutions([fwd, fwd]) assert_forward_allclose(fwd, fwd_ave) @testing.requires_testing_data def test_priors(): """Test prior computations.""" # Depth prior fwd = read_forward_solution(fname_meeg) assert not 
is_fixed_orient(fwd) n_sources = fwd['nsource'] info = read_info(fname_evoked) depth_prior = compute_depth_prior(fwd, info, exp=0.8) assert depth_prior.shape == (3 * n_sources,) depth_prior = compute_depth_prior(fwd, info, exp=0.) assert_array_equal(depth_prior, 1.) with pytest.raises(ValueError, match='must be "whiten"'): compute_depth_prior(fwd, info, limit_depth_chs='foo') with pytest.raises(ValueError, match='noise_cov must be a Covariance'): compute_depth_prior(fwd, info, limit_depth_chs='whiten') fwd_fixed = convert_forward_solution(fwd, force_fixed=True) depth_prior = compute_depth_prior(fwd_fixed, info=info) assert depth_prior.shape == (n_sources,) # Orientation prior orient_prior = compute_orient_prior(fwd, 1.) assert_array_equal(orient_prior, 1.) orient_prior = compute_orient_prior(fwd_fixed, 0.) assert_array_equal(orient_prior, 1.) with pytest.raises(ValueError, match='oriented in surface coordinates'): compute_orient_prior(fwd, 0.5) fwd_surf_ori = convert_forward_solution(fwd, surf_ori=True) orient_prior = compute_orient_prior(fwd_surf_ori, 0.5) assert all(np.in1d(orient_prior, (0.5, 1.))) with pytest.raises(ValueError, match='between 0 and 1'): compute_orient_prior(fwd_surf_ori, -0.5) with pytest.raises(ValueError, match='with fixed orientation'): compute_orient_prior(fwd_fixed, 0.5) @testing.requires_testing_data def test_equalize_channels(): """Test equalization of channels for instances of Forward.""" fwd1 = read_forward_solution(fname_meeg) fwd1.pick_channels(['EEG 001', 'EEG 002', 'EEG 003']) fwd2 = fwd1.copy().pick_channels(['EEG 002', 'EEG 001'], ordered=True) fwd1, fwd2 = equalize_channels([fwd1, fwd2]) assert fwd1.ch_names == ['EEG 001', 'EEG 002'] assert fwd2.ch_names == ['EEG 001', 'EEG 002']
rkmaddox/mne-python
mne/forward/tests/test_forward.py
mne/datasets/epilepsy_ecog/_data.py
# Authors: Denis A. Engemann <denis.engemann@gmail.com> # Teon Brooks <teon.brooks@gmail.com> # # simplified BSD-3 license import datetime import time import numpy as np from .egimff import _read_raw_egi_mff from .events import _combine_triggers from ..base import BaseRaw from ..utils import _read_segments_file, _create_chs from ..meas_info import _empty_info from ..constants import FIFF from ...utils import verbose, logger, warn, _validate_type, _check_fname def _read_header(fid): """Read EGI binary header.""" version = np.fromfile(fid, '<i4', 1)[0] if version > 6 & ~np.bitwise_and(version, 6): version = version.byteswap().astype(np.uint32) else: raise ValueError('Watchout. This does not seem to be a simple ' 'binary EGI file.') def my_fread(*x, **y): return np.fromfile(*x, **y)[0] info = dict( version=version, year=my_fread(fid, '>i2', 1), month=my_fread(fid, '>i2', 1), day=my_fread(fid, '>i2', 1), hour=my_fread(fid, '>i2', 1), minute=my_fread(fid, '>i2', 1), second=my_fread(fid, '>i2', 1), millisecond=my_fread(fid, '>i4', 1), samp_rate=my_fread(fid, '>i2', 1), n_channels=my_fread(fid, '>i2', 1), gain=my_fread(fid, '>i2', 1), bits=my_fread(fid, '>i2', 1), value_range=my_fread(fid, '>i2', 1) ) unsegmented = 1 if np.bitwise_and(version, 1) == 0 else 0 precision = np.bitwise_and(version, 6) if precision == 0: raise RuntimeError('Floating point precision is undefined.') if unsegmented: info.update(dict(n_categories=0, n_segments=1, n_samples=np.fromfile(fid, '>i4', 1)[0], n_events=np.fromfile(fid, '>i2', 1)[0], event_codes=[], category_names=[], category_lengths=[], pre_baseline=0)) for event in range(info['n_events']): event_codes = ''.join(np.fromfile(fid, 'S1', 4).astype('U1')) info['event_codes'].append(event_codes) else: raise NotImplementedError('Only continuous files are supported') info['unsegmented'] = unsegmented info['dtype'], info['orig_format'] = {2: ('>i2', 'short'), 4: ('>f4', 'float'), 6: ('>f8', 'double')}[precision] info['dtype'] = 
np.dtype(info['dtype']) return info def _read_events(fid, info): """Read events.""" events = np.zeros([info['n_events'], info['n_segments'] * info['n_samples']]) fid.seek(36 + info['n_events'] * 4, 0) # skip header for si in range(info['n_samples']): # skip data channels fid.seek(info['n_channels'] * info['dtype'].itemsize, 1) # read event channels events[:, si] = np.fromfile(fid, info['dtype'], info['n_events']) return events @verbose def read_raw_egi(input_fname, eog=None, misc=None, include=None, exclude=None, preload=False, channel_naming='E%d', verbose=None): """Read EGI simple binary as raw object. .. note:: This function attempts to create a synthetic trigger channel. See the Notes section below. Parameters ---------- input_fname : path-like Path to the raw file. Files with an extension .mff are automatically considered to be EGI's native MFF format files. eog : list or tuple Names of channels or list of indices that should be designated EOG channels. Default is None. misc : list or tuple Names of channels or list of indices that should be designated MISC channels. Default is None. include : None | list The event channels to be ignored when creating the synthetic trigger. Defaults to None. Note. Overrides ``exclude`` parameter. exclude : None | list The event channels to be ignored when creating the synthetic trigger. Defaults to None. If None, channels that have more than one event and the ``sync`` and ``TREV`` channels will be ignored. %(preload)s .. versionadded:: 0.11 channel_naming : str Channel naming convention for the data channels. Defaults to 'E%%d' (resulting in channel names 'E1', 'E2', 'E3'...). The effective default prior to 0.14.0 was 'EEG %%03d'. .. versionadded:: 0.14.0 %(verbose)s Returns ------- raw : instance of RawEGI A Raw object containing EGI data. See Also -------- mne.io.Raw : Documentation of attribute and methods. Notes ----- The trigger channel names are based on the arbitrary user dependent event codes used. 
However this function will attempt to generate a **synthetic trigger channel** named ``STI 014`` in accordance with the general Neuromag / MNE naming pattern. The event_id assignment equals ``np.arange(n_events) + 1``. The resulting ``event_id`` mapping is stored as attribute to the resulting raw object but will be ignored when saving to a fiff. Note. The trigger channel is artificially constructed based on timestamps received by the Netstation. As a consequence, triggers have only short durations. This step will fail if events are not mutually exclusive. """ _validate_type(input_fname, 'path-like', 'input_fname') input_fname = str(input_fname) if input_fname.endswith('.mff'): return _read_raw_egi_mff(input_fname, eog, misc, include, exclude, preload, channel_naming, verbose) return RawEGI(input_fname, eog, misc, include, exclude, preload, channel_naming, verbose) class RawEGI(BaseRaw): """Raw object from EGI simple binary file.""" @verbose def __init__(self, input_fname, eog=None, misc=None, include=None, exclude=None, preload=False, channel_naming='E%d', verbose=None): # noqa: D102 input_fname = _check_fname(input_fname, 'read', True, 'input_fname') if eog is None: eog = [] if misc is None: misc = [] with open(input_fname, 'rb') as fid: # 'rb' important for py3k logger.info('Reading EGI header from %s...' % input_fname) egi_info = _read_header(fid) logger.info(' Reading events ...') egi_events = _read_events(fid, egi_info) # update info + jump if egi_info['value_range'] != 0 and egi_info['bits'] != 0: cal = egi_info['value_range'] / 2. 
** egi_info['bits'] else: cal = 1e-6 logger.info(' Assembling measurement info ...') event_codes = [] if egi_info['n_events'] > 0: event_codes = list(egi_info['event_codes']) if include is None: exclude_list = ['sync', 'TREV'] if exclude is None else exclude exclude_inds = [i for i, k in enumerate(event_codes) if k in exclude_list] more_excludes = [] if exclude is None: for ii, event in enumerate(egi_events): if event.sum() <= 1 and event_codes[ii]: more_excludes.append(ii) if len(exclude_inds) + len(more_excludes) == len(event_codes): warn('Did not find any event code with more than one ' 'event.', RuntimeWarning) else: exclude_inds.extend(more_excludes) exclude_inds.sort() include_ = [i for i in np.arange(egi_info['n_events']) if i not in exclude_inds] include_names = [k for i, k in enumerate(event_codes) if i in include_] else: include_ = [i for i, k in enumerate(event_codes) if k in include] include_names = include for kk, v in [('include', include_names), ('exclude', exclude)]: if isinstance(v, list): for k in v: if k not in event_codes: raise ValueError('Could find event named "%s"' % k) elif v is not None: raise ValueError('`%s` must be None or of type list' % kk) event_ids = np.arange(len(include_)) + 1 logger.info(' Synthesizing trigger channel "STI 014" ...') logger.info(' Excluding events {%s} ...' 
% ", ".join([k for i, k in enumerate(event_codes) if i not in include_])) egi_info['new_trigger'] = _combine_triggers( egi_events[include_], remapping=event_ids) self.event_id = dict(zip([e for e in event_codes if e in include_names], event_ids)) else: # No events self.event_id = None egi_info['new_trigger'] = None info = _empty_info(egi_info['samp_rate']) my_time = datetime.datetime( egi_info['year'], egi_info['month'], egi_info['day'], egi_info['hour'], egi_info['minute'], egi_info['second']) my_timestamp = time.mktime(my_time.timetuple()) info['meas_date'] = (my_timestamp, 0) ch_names = [channel_naming % (i + 1) for i in range(egi_info['n_channels'])] ch_names.extend(list(egi_info['event_codes'])) if egi_info['new_trigger'] is not None: ch_names.append('STI 014') # our new_trigger nchan = len(ch_names) cals = np.repeat(cal, nchan) ch_coil = FIFF.FIFFV_COIL_EEG ch_kind = FIFF.FIFFV_EEG_CH chs = _create_chs(ch_names, cals, ch_coil, ch_kind, eog, (), (), misc) sti_ch_idx = [i for i, name in enumerate(ch_names) if name.startswith('STI') or name in event_codes] for idx in sti_ch_idx: chs[idx].update({'unit_mul': FIFF.FIFF_UNITM_NONE, 'cal': 1., 'kind': FIFF.FIFFV_STIM_CH, 'coil_type': FIFF.FIFFV_COIL_NONE, 'unit': FIFF.FIFF_UNIT_NONE}) info['chs'] = chs info._update_redundant() super(RawEGI, self).__init__( info, preload, orig_format=egi_info['orig_format'], filenames=[input_fname], last_samps=[egi_info['n_samples'] - 1], raw_extras=[egi_info], verbose=verbose) def _read_segment_file(self, data, idx, fi, start, stop, cals, mult): """Read a segment of data from a file.""" egi_info = self._raw_extras[fi] dtype = egi_info['dtype'] n_chan_read = egi_info['n_channels'] + egi_info['n_events'] offset = 36 + egi_info['n_events'] * 4 trigger_ch = egi_info['new_trigger'] _read_segments_file(self, data, idx, fi, start, stop, cals, mult, dtype=dtype, n_channels=n_chan_read, offset=offset, trigger_ch=trigger_ch)
import os.path as op import gc import pytest import numpy as np from numpy.testing import (assert_array_almost_equal, assert_equal, assert_array_equal, assert_allclose) from mne.datasets import testing from mne import (read_forward_solution, apply_forward, apply_forward_raw, average_forward_solutions, write_forward_solution, convert_forward_solution, SourceEstimate, pick_types_forward, read_evokeds, VectorSourceEstimate) from mne.io import read_info from mne.label import read_label from mne.utils import requires_mne, run_subprocess from mne.forward import (restrict_forward_to_stc, restrict_forward_to_label, Forward, is_fixed_orient, compute_orient_prior, compute_depth_prior) from mne.channels import equalize_channels data_path = testing.data_path(download=False) fname_meeg = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif') fname_meeg_grad = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-2-grad-fwd.fif') fname_evoked = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data', 'test-ave.fif') def assert_forward_allclose(f1, f2, rtol=1e-7): """Compare two potentially converted forward solutions.""" assert_allclose(f1['sol']['data'], f2['sol']['data'], rtol=rtol) assert f1['sol']['ncol'] == f2['sol']['ncol'] assert f1['sol']['ncol'] == f1['sol']['data'].shape[1] assert_allclose(f1['source_nn'], f2['source_nn'], rtol=rtol) if f1['sol_grad'] is not None: assert (f2['sol_grad'] is not None) assert_allclose(f1['sol_grad']['data'], f2['sol_grad']['data']) assert f1['sol_grad']['ncol'] == f2['sol_grad']['ncol'] assert f1['sol_grad']['ncol'] == f1['sol_grad']['data'].shape[1] else: assert (f2['sol_grad'] is None) assert f1['source_ori'] == f2['source_ori'] assert f1['surf_ori'] == f2['surf_ori'] assert f1['src'][0]['coord_frame'] == f1['src'][0]['coord_frame'] @testing.requires_testing_data def test_convert_forward(): """Test converting forward solution between different representations.""" fwd = 
read_forward_solution(fname_meeg_grad) fwd_repr = repr(fwd) assert ('306' in fwd_repr) assert ('60' in fwd_repr) assert (fwd_repr) assert (isinstance(fwd, Forward)) # look at surface orientation fwd_surf = convert_forward_solution(fwd, surf_ori=True) # go back fwd_new = convert_forward_solution(fwd_surf, surf_ori=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd_new gc.collect() # now go to fixed fwd_fixed = convert_forward_solution(fwd_surf, surf_ori=True, force_fixed=True, use_cps=False) del fwd_surf gc.collect() assert (repr(fwd_fixed)) assert (isinstance(fwd_fixed, Forward)) assert (is_fixed_orient(fwd_fixed)) # now go back to cartesian (original condition) fwd_new = convert_forward_solution(fwd_fixed, surf_ori=False, force_fixed=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd, fwd_new, fwd_fixed gc.collect() @pytest.mark.slowtest @testing.requires_testing_data def test_io_forward(tmpdir): """Test IO for forward solutions.""" # do extensive tests with MEEG + grad n_channels, n_src = 366, 108 fwd = read_forward_solution(fname_meeg_grad) assert (isinstance(fwd, Forward)) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd['sol']['row_names']), n_channels) fname_temp = tmpdir.join('test-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True) leadfield = fwd_read['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd_read['sol']['row_names']), n_channels) 
assert_equal(len(fwd_read['info']['chs']), n_channels) assert ('dev_head_t' in fwd_read['info']) assert ('mri_head_t' in fwd_read) assert_array_almost_equal(fwd['sol']['data'], fwd_read['sol']['data']) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=False) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=False) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, 1494 / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) 
fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) # test warnings on bad filenames fwd = read_forward_solution(fname_meeg_grad) fwd_badname = tmpdir.join('test-bad-name.fif.gz') with pytest.warns(RuntimeWarning, match='end with'): write_forward_solution(fwd_badname, fwd) with pytest.warns(RuntimeWarning, match='end with'): read_forward_solution(fwd_badname) fwd = read_forward_solution(fname_meeg) write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) assert_forward_allclose(fwd, fwd_read) @testing.requires_testing_data def test_apply_forward(): """Test projection of source space data to sensor space.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) assert isinstance(fwd, Forward) vertno = [fwd['src'][0]['vertno'], fwd['src'][1]['vertno']] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) gain_sum = np.sum(fwd['sol']['data'], axis=1) # Evoked evoked = read_evokeds(fname_evoked, condition=0) evoked.pick_types(meg=True) with pytest.warns(RuntimeWarning, match='only .* positive values'): evoked = apply_forward(fwd, stc, evoked.info, start=start, stop=stop) data = evoked.data times = evoked.times # do some tests assert_array_almost_equal(evoked.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) assert_array_almost_equal(times[0], t_start) assert_array_almost_equal(times[-1], t_start + (n_times - 1) / sfreq) # vector stc_vec = VectorSourceEstimate( fwd['source_nn'][:, :, np.newaxis] * 
stc.data[:, np.newaxis], stc.vertices, stc.tmin, stc.tstep) with pytest.warns(RuntimeWarning, match='very large'): evoked_2 = apply_forward(fwd, stc_vec, evoked.info) assert np.abs(evoked_2.data).mean() > 1e-5 assert_allclose(evoked.data, evoked_2.data, atol=1e-10) # Raw with pytest.warns(RuntimeWarning, match='only .* positive values'): raw_proj = apply_forward_raw(fwd, stc, evoked.info, start=start, stop=stop) data, times = raw_proj[:, :] # do some tests assert_array_almost_equal(raw_proj.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) atol = 1. / sfreq assert_allclose(raw_proj.first_samp / sfreq, t_start, atol=atol) assert_allclose(raw_proj.last_samp / sfreq, t_start + (n_times - 1) / sfreq, atol=atol) @testing.requires_testing_data def test_restrict_forward_to_stc(tmpdir): """Test restriction of source space to source SourceEstimate.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert (isinstance(fwd_out, Forward)) assert_equal(fwd_out['sol']['ncol'], 20) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=False) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = 
SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert_equal(fwd_out['sol']['ncol'], 60) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. fname_copy = tmpdir.join('copy-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) fwd_out_read = convert_forward_solution(fwd_out_read, surf_ori=True, force_fixed=False) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data def test_restrict_forward_to_label(tmpdir): """Test restriction of source space to label.""" fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], len(src_sel_lh) + len(src_sel_rh)) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], 
len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) fwd = read_forward_solution(fname_meeg) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], 3 * (len(src_sel_lh) + len(src_sel_rh))) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. 
fname_copy = tmpdir.join('copy-fwd.fif') write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data @requires_mne def test_average_forward_solution(tmpdir): """Test averaging forward solutions.""" fwd = read_forward_solution(fname_meeg) # input not a list pytest.raises(TypeError, average_forward_solutions, 1) # list is too short pytest.raises(ValueError, average_forward_solutions, []) # negative weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [-1, 0]) # all zero weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0]) # weights not same length pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0, 0]) # list does not only have all dict() pytest.raises(TypeError, average_forward_solutions, [1, fwd]) # try an easy case fwd_copy = average_forward_solutions([fwd]) assert (isinstance(fwd_copy, Forward)) assert_array_equal(fwd['sol']['data'], fwd_copy['sol']['data']) # modify a fwd solution, save it, use MNE to average with old one fwd_copy['sol']['data'] *= 0.5 fname_copy = str(tmpdir.join('copy-fwd.fif')) write_forward_solution(fname_copy, fwd_copy, overwrite=True) cmd = ('mne_average_forward_solutions', '--fwd', fname_meeg, '--fwd', fname_copy, '--out', fname_copy) run_subprocess(cmd) # now let's actually do it, with one filename and one fwd fwd_ave = average_forward_solutions([fwd, fwd_copy]) assert_array_equal(0.75 * fwd['sol']['data'], fwd_ave['sol']['data']) # fwd_ave_mne = read_forward_solution(fname_copy) # assert_array_equal(fwd_ave_mne['sol']['data'], fwd_ave['sol']['data']) # with gradient fwd = read_forward_solution(fname_meeg_grad) fwd_ave = average_forward_solutions([fwd, fwd]) assert_forward_allclose(fwd, fwd_ave) @testing.requires_testing_data def test_priors(): """Test prior computations.""" # Depth prior fwd = read_forward_solution(fname_meeg) assert not 
is_fixed_orient(fwd) n_sources = fwd['nsource'] info = read_info(fname_evoked) depth_prior = compute_depth_prior(fwd, info, exp=0.8) assert depth_prior.shape == (3 * n_sources,) depth_prior = compute_depth_prior(fwd, info, exp=0.) assert_array_equal(depth_prior, 1.) with pytest.raises(ValueError, match='must be "whiten"'): compute_depth_prior(fwd, info, limit_depth_chs='foo') with pytest.raises(ValueError, match='noise_cov must be a Covariance'): compute_depth_prior(fwd, info, limit_depth_chs='whiten') fwd_fixed = convert_forward_solution(fwd, force_fixed=True) depth_prior = compute_depth_prior(fwd_fixed, info=info) assert depth_prior.shape == (n_sources,) # Orientation prior orient_prior = compute_orient_prior(fwd, 1.) assert_array_equal(orient_prior, 1.) orient_prior = compute_orient_prior(fwd_fixed, 0.) assert_array_equal(orient_prior, 1.) with pytest.raises(ValueError, match='oriented in surface coordinates'): compute_orient_prior(fwd, 0.5) fwd_surf_ori = convert_forward_solution(fwd, surf_ori=True) orient_prior = compute_orient_prior(fwd_surf_ori, 0.5) assert all(np.in1d(orient_prior, (0.5, 1.))) with pytest.raises(ValueError, match='between 0 and 1'): compute_orient_prior(fwd_surf_ori, -0.5) with pytest.raises(ValueError, match='with fixed orientation'): compute_orient_prior(fwd_fixed, 0.5) @testing.requires_testing_data def test_equalize_channels(): """Test equalization of channels for instances of Forward.""" fwd1 = read_forward_solution(fname_meeg) fwd1.pick_channels(['EEG 001', 'EEG 002', 'EEG 003']) fwd2 = fwd1.copy().pick_channels(['EEG 002', 'EEG 001'], ordered=True) fwd1, fwd2 = equalize_channels([fwd1, fwd2]) assert fwd1.ch_names == ['EEG 001', 'EEG 002'] assert fwd2.ch_names == ['EEG 001', 'EEG 002']
rkmaddox/mne-python
mne/forward/tests/test_forward.py
mne/io/egi/egi.py
"""Mayavi/traits GUI for averaging two sets of KIT marker points.""" # Authors: Christian Brodbeck <christianbrodbeck@nyu.edu> # # License: BSD (3-clause) import os import sys import numpy as np from mayavi.tools.mlab_scene_model import MlabSceneModel from pyface.api import confirm, error, FileDialog, OK, YES from traits.api import (HasTraits, HasPrivateTraits, on_trait_change, cached_property, Instance, Property, Array, Bool, Button, Enum, File, Float, List, Str, ArrayOrNone) from traitsui.api import View, Item, HGroup, VGroup, CheckListEditor from traitsui.menu import Action, CancelButton from ..transforms import apply_trans, rotation, translation from ..coreg import fit_matched_points from ..io.kit import read_mrk from ..io._digitization import _write_dig_points from ._viewer import PointObject from ._backend import _get_pyface_backend if _get_pyface_backend() == 'wx': mrk_wildcard = [ 'Supported Files (*.sqd, *.mrk, *.txt, *.pickled)|*.sqd;*.mrk;*.txt;*.pickled', # noqa:E501 'Sqd marker file (*.sqd;*.mrk)|*.sqd;*.mrk', 'Text marker file (*.txt)|*.txt', 'Pickled markers (*.pickled)|*.pickled'] mrk_out_wildcard = ["Tab separated values file (*.txt)|*.txt"] else: if sys.platform in ('win32', 'linux2'): # on Windows and Ubuntu, multiple wildcards does not seem to work mrk_wildcard = ["*.sqd", "*.mrk", "*.txt", "*.pickled"] else: mrk_wildcard = ["*.sqd;*.mrk;*.txt;*.pickled"] mrk_out_wildcard = "*.txt" out_ext = '.txt' use_editor_v = CheckListEditor(cols=1, values=[(i, str(i)) for i in range(5)]) use_editor_h = CheckListEditor(cols=5, values=[(i, str(i)) for i in range(5)]) mrk_view_editable = View( VGroup('file', Item('name', show_label=False, style='readonly'), HGroup( Item('use', editor=use_editor_v, enabled_when="enabled", style='custom'), 'points', ), HGroup(Item('clear', enabled_when="can_save", show_label=False), Item('save_as', enabled_when="can_save", show_label=False)), )) mrk_view_basic = View( VGroup('file', Item('name', show_label=False, 
style='readonly'), Item('use', editor=use_editor_h, enabled_when="enabled", style='custom'), HGroup(Item('clear', enabled_when="can_save", show_label=False), Item('edit', show_label=False), Item('switch_left_right', label="Switch Left/Right", show_label=False), Item('reorder', show_label=False), Item('save_as', enabled_when="can_save", show_label=False)), )) mrk_view_edit = View(VGroup('points')) class ReorderDialog(HasPrivateTraits): """Dialog for reordering marker points.""" order = Str("0 1 2 3 4") index = Property(List, depends_on='order') is_ok = Property(Bool, depends_on='order') view = View( Item('order', label='New order (five space delimited numbers)'), buttons=[CancelButton, Action(name='OK', enabled_when='is_ok')]) def _get_index(self): try: return [int(i) for i in self.order.split()] except ValueError: return [] def _get_is_ok(self): return sorted(self.index) == [0, 1, 2, 3, 4] class MarkerPoints(HasPrivateTraits): """Represent 5 marker points.""" points = Array(float, (5, 3)) can_save = Property(depends_on='points') save_as = Button() view = View(VGroup('points', Item('save_as', enabled_when='can_save'))) @cached_property def _get_can_save(self): return np.any(self.points) def _save_as_fired(self): dlg = FileDialog(action="save as", wildcard=mrk_out_wildcard, default_filename=self.name, default_directory=self.dir) dlg.open() if dlg.return_code != OK: return path, ext = os.path.splitext(dlg.path) if not path.endswith(out_ext) and len(ext) != 0: ValueError("The extension '%s' is not supported." % ext) path = path + out_ext if os.path.exists(path): answer = confirm(None, "The file %r already exists. Should it " "be replaced?", "Overwrite File?") if answer != YES: return self.save(path) def save(self, path): """Save the marker points. Parameters ---------- path : str Path to the file to write. The kind of file to write is determined based on the extension: '.txt' for tab separated text file, '.pickled' for pickled file. 
""" _write_dig_points(path, self.points) class MarkerPointSource(MarkerPoints): # noqa: D401 """MarkerPoints subclass for source files.""" file = File(filter=mrk_wildcard, exists=True) name = Property(Str, depends_on='file') dir = Property(Str, depends_on='file') use = List(list(range(5)), desc="Which points to use for the interpolated " "marker.") enabled = Property(Bool, depends_on=['points', 'use']) clear = Button(desc="Clear the current marker data") edit = Button(desc="Edit the marker coordinates manually") switch_left_right = Button( desc="Switch left and right marker points; this is intended to " "correct for markers that were attached in the wrong order") reorder = Button(desc="Change the order of the marker points") view = mrk_view_basic @cached_property def _get_enabled(self): return np.any(self.points) @cached_property def _get_dir(self): if self.file: return os.path.dirname(self.file) @cached_property def _get_name(self): if self.file: return os.path.basename(self.file) @on_trait_change('file') def load(self, fname): if not fname: self.reset_traits(['points']) return try: pts = read_mrk(fname) except Exception as err: error(None, str(err), "Error Reading mrk") self.reset_traits(['points']) else: self.points = pts def _clear_fired(self): self.reset_traits(['file', 'points', 'use']) def _edit_fired(self): self.edit_traits(view=mrk_view_edit) def _reorder_fired(self): dlg = ReorderDialog() ui = dlg.edit_traits(kind='modal') if not ui.result: # user pressed cancel return self.points = self.points[dlg.index] def _switch_left_right_fired(self): self.points = self.points[[1, 0, 2, 4, 3]] class MarkerPointDest(MarkerPoints): # noqa: D401 """MarkerPoints subclass that serves for derived points.""" src1 = Instance(MarkerPointSource) src2 = Instance(MarkerPointSource) name = Property(Str, depends_on='src1.name,src2.name') dir = Property(Str, depends_on='src1.dir,src2.dir') points = Property(ArrayOrNone(float, (5, 3)), depends_on=['method', 'src1.points', 
'src1.use', 'src2.points', 'src2.use']) enabled = Property(Bool, depends_on=['points']) method = Enum('Transform', 'Average', desc="Transform: estimate a rotation" "/translation from mrk1 to mrk2; Average: use the average " "of the mrk1 and mrk2 coordinates for each point.") view = View(VGroup(Item('method', style='custom'), Item('save_as', enabled_when='can_save', show_label=False))) @cached_property def _get_dir(self): return self.src1.dir @cached_property def _get_name(self): n1 = self.src1.name n2 = self.src2.name if not n1: if n2: return n2 else: return '' elif not n2: return n1 if n1 == n2: return n1 i = 0 l1 = len(n1) - 1 l2 = len(n1) - 2 while n1[i] == n2[i]: if i == l1: return n1 elif i == l2: return n2 i += 1 return n1[:i] @cached_property def _get_enabled(self): return np.any(self.points) @cached_property def _get_points(self): # in case only one or no source is enabled if not (self.src1 and self.src1.enabled): if (self.src2 and self.src2.enabled): return self.src2.points else: return np.zeros((5, 3)) elif not (self.src2 and self.src2.enabled): return self.src1.points # Average method if self.method == 'Average': if len(np.union1d(self.src1.use, self.src2.use)) < 5: error(None, "Need at least one source for each point.", "Marker Average Error") return np.zeros((5, 3)) pts = (self.src1.points + self.src2.points) / 2. for i in np.setdiff1d(self.src1.use, self.src2.use): pts[i] = self.src1.points[i] for i in np.setdiff1d(self.src2.use, self.src1.use): pts[i] = self.src2.points[i] return pts # Transform method idx = np.intersect1d(np.array(self.src1.use), np.array(self.src2.use), assume_unique=True) if len(idx) < 3: error(None, "Need at least three shared points for trans" "formation.", "Marker Interpolation Error") return np.zeros((5, 3)) src_pts = self.src1.points[idx] tgt_pts = self.src2.points[idx] est = fit_matched_points(src_pts, tgt_pts, out='params') rot = np.array(est[:3]) / 2. tra = np.array(est[3:]) / 2. 
if len(self.src1.use) == 5: trans = np.dot(translation(*tra), rotation(*rot)) pts = apply_trans(trans, self.src1.points) elif len(self.src2.use) == 5: trans = np.dot(translation(* -tra), rotation(* -rot)) pts = apply_trans(trans, self.src2.points) else: trans1 = np.dot(translation(*tra), rotation(*rot)) pts = apply_trans(trans1, self.src1.points) trans2 = np.dot(translation(* -tra), rotation(* -rot)) for i in np.setdiff1d(self.src2.use, self.src1.use): pts[i] = apply_trans(trans2, self.src2.points[i]) return pts class CombineMarkersModel(HasPrivateTraits): """Combine markers model.""" mrk1_file = Instance(File) mrk2_file = Instance(File) mrk1 = Instance(MarkerPointSource) mrk2 = Instance(MarkerPointSource) mrk3 = Instance(MarkerPointDest) clear = Button(desc="Clear the current marker data") # stats distance = Property(Str, depends_on=['mrk1.points', 'mrk2.points']) def _clear_fired(self): self.mrk1.clear = True self.mrk2.clear = True self.mrk3.reset_traits(['method']) def _mrk1_default(self): return MarkerPointSource() def _mrk1_file_default(self): return self.mrk1.trait('file') def _mrk2_default(self): return MarkerPointSource() def _mrk2_file_default(self): return self.mrk2.trait('file') def _mrk3_default(self): return MarkerPointDest(src1=self.mrk1, src2=self.mrk2) @cached_property def _get_distance(self): if (self.mrk1 is None or self.mrk2 is None or (not np.any(self.mrk1.points)) or (not np.any(self.mrk2.points))): return "" ds = np.sqrt(np.sum((self.mrk1.points - self.mrk2.points) ** 2, 1)) desc = '\t'.join('%.1f mm' % (d * 1000) for d in ds) return desc class CombineMarkersPanel(HasTraits): # noqa: D401 """Has two marker points sources and interpolates to a third one.""" model = Instance(CombineMarkersModel, ()) # model references for UI mrk1 = Instance(MarkerPointSource) mrk2 = Instance(MarkerPointSource) mrk3 = Instance(MarkerPointDest) distance = Str # Visualization scene = Instance(MlabSceneModel) scale = Float(5e-3) mrk1_obj = Instance(PointObject) 
mrk2_obj = Instance(PointObject) mrk3_obj = Instance(PointObject) trans = Array() view = View(VGroup(VGroup(Item('mrk1', style='custom'), Item('mrk1_obj', style='custom'), show_labels=False, label="Source Marker 1", show_border=True), VGroup(Item('mrk2', style='custom'), Item('mrk2_obj', style='custom'), show_labels=False, label="Source Marker 2", show_border=True), VGroup(Item('distance', style='readonly'), label='Stats', show_border=True), VGroup(Item('mrk3', style='custom'), Item('mrk3_obj', style='custom'), show_labels=False, label="New Marker", show_border=True), )) def _mrk1_default(self): return self.model.mrk1 def _mrk2_default(self): return self.model.mrk2 def _mrk3_default(self): return self.model.mrk3 def __init__(self, *args, **kwargs): # noqa: D102 super(CombineMarkersPanel, self).__init__(*args, **kwargs) self.model.sync_trait('distance', self, 'distance', mutual=False) self.mrk1_obj = PointObject(scene=self.scene, color=(0.608, 0.216, 0.216), point_scale=self.scale) self.model.mrk1.sync_trait( 'enabled', self.mrk1_obj, 'visible', mutual=False) self.mrk2_obj = PointObject(scene=self.scene, color=(0.216, 0.608, 0.216), point_scale=self.scale) self.model.mrk2.sync_trait( 'enabled', self.mrk2_obj, 'visible', mutual=False) self.mrk3_obj = PointObject(scene=self.scene, color=(0.588, 0.784, 1.), point_scale=self.scale) self.model.mrk3.sync_trait( 'enabled', self.mrk3_obj, 'visible', mutual=False) @on_trait_change('model:mrk1:points,trans') def _update_mrk1(self): if self.mrk1_obj is not None: self.mrk1_obj.points = apply_trans(self.trans, self.model.mrk1.points) @on_trait_change('model:mrk2:points,trans') def _update_mrk2(self): if self.mrk2_obj is not None: self.mrk2_obj.points = apply_trans(self.trans, self.model.mrk2.points) @on_trait_change('model:mrk3:points,trans') def _update_mrk3(self): if self.mrk3_obj is not None: self.mrk3_obj.points = apply_trans(self.trans, self.model.mrk3.points)
import os.path as op import gc import pytest import numpy as np from numpy.testing import (assert_array_almost_equal, assert_equal, assert_array_equal, assert_allclose) from mne.datasets import testing from mne import (read_forward_solution, apply_forward, apply_forward_raw, average_forward_solutions, write_forward_solution, convert_forward_solution, SourceEstimate, pick_types_forward, read_evokeds, VectorSourceEstimate) from mne.io import read_info from mne.label import read_label from mne.utils import requires_mne, run_subprocess from mne.forward import (restrict_forward_to_stc, restrict_forward_to_label, Forward, is_fixed_orient, compute_orient_prior, compute_depth_prior) from mne.channels import equalize_channels data_path = testing.data_path(download=False) fname_meeg = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif') fname_meeg_grad = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-2-grad-fwd.fif') fname_evoked = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data', 'test-ave.fif') def assert_forward_allclose(f1, f2, rtol=1e-7): """Compare two potentially converted forward solutions.""" assert_allclose(f1['sol']['data'], f2['sol']['data'], rtol=rtol) assert f1['sol']['ncol'] == f2['sol']['ncol'] assert f1['sol']['ncol'] == f1['sol']['data'].shape[1] assert_allclose(f1['source_nn'], f2['source_nn'], rtol=rtol) if f1['sol_grad'] is not None: assert (f2['sol_grad'] is not None) assert_allclose(f1['sol_grad']['data'], f2['sol_grad']['data']) assert f1['sol_grad']['ncol'] == f2['sol_grad']['ncol'] assert f1['sol_grad']['ncol'] == f1['sol_grad']['data'].shape[1] else: assert (f2['sol_grad'] is None) assert f1['source_ori'] == f2['source_ori'] assert f1['surf_ori'] == f2['surf_ori'] assert f1['src'][0]['coord_frame'] == f1['src'][0]['coord_frame'] @testing.requires_testing_data def test_convert_forward(): """Test converting forward solution between different representations.""" fwd = 
read_forward_solution(fname_meeg_grad) fwd_repr = repr(fwd) assert ('306' in fwd_repr) assert ('60' in fwd_repr) assert (fwd_repr) assert (isinstance(fwd, Forward)) # look at surface orientation fwd_surf = convert_forward_solution(fwd, surf_ori=True) # go back fwd_new = convert_forward_solution(fwd_surf, surf_ori=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd_new gc.collect() # now go to fixed fwd_fixed = convert_forward_solution(fwd_surf, surf_ori=True, force_fixed=True, use_cps=False) del fwd_surf gc.collect() assert (repr(fwd_fixed)) assert (isinstance(fwd_fixed, Forward)) assert (is_fixed_orient(fwd_fixed)) # now go back to cartesian (original condition) fwd_new = convert_forward_solution(fwd_fixed, surf_ori=False, force_fixed=False) assert (repr(fwd_new)) assert (isinstance(fwd_new, Forward)) assert_forward_allclose(fwd, fwd_new) del fwd, fwd_new, fwd_fixed gc.collect() @pytest.mark.slowtest @testing.requires_testing_data def test_io_forward(tmpdir): """Test IO for forward solutions.""" # do extensive tests with MEEG + grad n_channels, n_src = 366, 108 fwd = read_forward_solution(fname_meeg_grad) assert (isinstance(fwd, Forward)) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd['sol']['row_names']), n_channels) fname_temp = tmpdir.join('test-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True) leadfield = fwd_read['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src)) assert_equal(len(fwd_read['sol']['row_names']), n_channels) 
assert_equal(len(fwd_read['info']['chs']), n_channels) assert ('dev_head_t' in fwd_read['info']) assert ('mri_head_t' in fwd_read) assert_array_almost_equal(fwd['sol']['data'], fwd_read['sol']['data']) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=False) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=False) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, 1494 / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) 
fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) # test warnings on bad filenames fwd = read_forward_solution(fname_meeg_grad) fwd_badname = tmpdir.join('test-bad-name.fif.gz') with pytest.warns(RuntimeWarning, match='end with'): write_forward_solution(fwd_badname, fwd) with pytest.warns(RuntimeWarning, match='end with'): read_forward_solution(fwd_badname) fwd = read_forward_solution(fname_meeg) write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) assert_forward_allclose(fwd, fwd_read) @testing.requires_testing_data def test_apply_forward(): """Test projection of source space data to sensor space.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) assert isinstance(fwd, Forward) vertno = [fwd['src'][0]['vertno'], fwd['src'][1]['vertno']] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) gain_sum = np.sum(fwd['sol']['data'], axis=1) # Evoked evoked = read_evokeds(fname_evoked, condition=0) evoked.pick_types(meg=True) with pytest.warns(RuntimeWarning, match='only .* positive values'): evoked = apply_forward(fwd, stc, evoked.info, start=start, stop=stop) data = evoked.data times = evoked.times # do some tests assert_array_almost_equal(evoked.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) assert_array_almost_equal(times[0], t_start) assert_array_almost_equal(times[-1], t_start + (n_times - 1) / sfreq) # vector stc_vec = VectorSourceEstimate( fwd['source_nn'][:, :, np.newaxis] * 
stc.data[:, np.newaxis], stc.vertices, stc.tmin, stc.tstep) with pytest.warns(RuntimeWarning, match='very large'): evoked_2 = apply_forward(fwd, stc_vec, evoked.info) assert np.abs(evoked_2.data).mean() > 1e-5 assert_allclose(evoked.data, evoked_2.data, atol=1e-10) # Raw with pytest.warns(RuntimeWarning, match='only .* positive values'): raw_proj = apply_forward_raw(fwd, stc, evoked.info, start=start, stop=stop) data, times = raw_proj[:, :] # do some tests assert_array_almost_equal(raw_proj.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) atol = 1. / sfreq assert_allclose(raw_proj.first_samp / sfreq, t_start, atol=atol) assert_allclose(raw_proj.last_samp / sfreq, t_start + (n_times - 1) / sfreq, atol=atol) @testing.requires_testing_data def test_restrict_forward_to_stc(tmpdir): """Test restriction of source space to source SourceEstimate.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert (isinstance(fwd_out, Forward)) assert_equal(fwd_out['sol']['ncol'], 20) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=False) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = 
SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert_equal(fwd_out['sol']['ncol'], 60) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. fname_copy = tmpdir.join('copy-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) fwd_out_read = convert_forward_solution(fwd_out_read, surf_ori=True, force_fixed=False) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data def test_restrict_forward_to_label(tmpdir): """Test restriction of source space to label.""" fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], len(src_sel_lh) + len(src_sel_rh)) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], 
len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) fwd = read_forward_solution(fname_meeg) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], 3 * (len(src_sel_lh) + len(src_sel_rh))) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. 
fname_copy = tmpdir.join('copy-fwd.fif') write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data @requires_mne def test_average_forward_solution(tmpdir): """Test averaging forward solutions.""" fwd = read_forward_solution(fname_meeg) # input not a list pytest.raises(TypeError, average_forward_solutions, 1) # list is too short pytest.raises(ValueError, average_forward_solutions, []) # negative weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [-1, 0]) # all zero weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0]) # weights not same length pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0, 0]) # list does not only have all dict() pytest.raises(TypeError, average_forward_solutions, [1, fwd]) # try an easy case fwd_copy = average_forward_solutions([fwd]) assert (isinstance(fwd_copy, Forward)) assert_array_equal(fwd['sol']['data'], fwd_copy['sol']['data']) # modify a fwd solution, save it, use MNE to average with old one fwd_copy['sol']['data'] *= 0.5 fname_copy = str(tmpdir.join('copy-fwd.fif')) write_forward_solution(fname_copy, fwd_copy, overwrite=True) cmd = ('mne_average_forward_solutions', '--fwd', fname_meeg, '--fwd', fname_copy, '--out', fname_copy) run_subprocess(cmd) # now let's actually do it, with one filename and one fwd fwd_ave = average_forward_solutions([fwd, fwd_copy]) assert_array_equal(0.75 * fwd['sol']['data'], fwd_ave['sol']['data']) # fwd_ave_mne = read_forward_solution(fname_copy) # assert_array_equal(fwd_ave_mne['sol']['data'], fwd_ave['sol']['data']) # with gradient fwd = read_forward_solution(fname_meeg_grad) fwd_ave = average_forward_solutions([fwd, fwd]) assert_forward_allclose(fwd, fwd_ave) @testing.requires_testing_data def test_priors(): """Test prior computations.""" # Depth prior fwd = read_forward_solution(fname_meeg) assert not 
is_fixed_orient(fwd) n_sources = fwd['nsource'] info = read_info(fname_evoked) depth_prior = compute_depth_prior(fwd, info, exp=0.8) assert depth_prior.shape == (3 * n_sources,) depth_prior = compute_depth_prior(fwd, info, exp=0.) assert_array_equal(depth_prior, 1.) with pytest.raises(ValueError, match='must be "whiten"'): compute_depth_prior(fwd, info, limit_depth_chs='foo') with pytest.raises(ValueError, match='noise_cov must be a Covariance'): compute_depth_prior(fwd, info, limit_depth_chs='whiten') fwd_fixed = convert_forward_solution(fwd, force_fixed=True) depth_prior = compute_depth_prior(fwd_fixed, info=info) assert depth_prior.shape == (n_sources,) # Orientation prior orient_prior = compute_orient_prior(fwd, 1.) assert_array_equal(orient_prior, 1.) orient_prior = compute_orient_prior(fwd_fixed, 0.) assert_array_equal(orient_prior, 1.) with pytest.raises(ValueError, match='oriented in surface coordinates'): compute_orient_prior(fwd, 0.5) fwd_surf_ori = convert_forward_solution(fwd, surf_ori=True) orient_prior = compute_orient_prior(fwd_surf_ori, 0.5) assert all(np.in1d(orient_prior, (0.5, 1.))) with pytest.raises(ValueError, match='between 0 and 1'): compute_orient_prior(fwd_surf_ori, -0.5) with pytest.raises(ValueError, match='with fixed orientation'): compute_orient_prior(fwd_fixed, 0.5) @testing.requires_testing_data def test_equalize_channels(): """Test equalization of channels for instances of Forward.""" fwd1 = read_forward_solution(fname_meeg) fwd1.pick_channels(['EEG 001', 'EEG 002', 'EEG 003']) fwd2 = fwd1.copy().pick_channels(['EEG 002', 'EEG 001'], ordered=True) fwd1, fwd2 = equalize_channels([fwd1, fwd2]) assert fwd1.ch_names == ['EEG 001', 'EEG 002'] assert fwd2.ch_names == ['EEG 001', 'EEG 002']
rkmaddox/mne-python
mne/forward/tests/test_forward.py
mne/gui/_marker_gui.py
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Martin Luessi <mluessi@nmr.mgh.harvard.edu> # Denis Engemann <denis.engemann@gmail.com> # # License: BSD (3-clause) from collections import defaultdict from colorsys import hsv_to_rgb, rgb_to_hsv import copy as cp import os import os.path as op import re import numpy as np from .morph_map import read_morph_map from .parallel import parallel_func, check_n_jobs from .source_estimate import (SourceEstimate, VolSourceEstimate, _center_of_mass, extract_label_time_course, spatial_src_adjacency) from .source_space import (add_source_space_distances, SourceSpaces, read_freesurfer_lut, _import_nibabel) from .stats.cluster_level import _find_clusters, _get_components from .surface import read_surface, fast_cross_3d, mesh_edges, mesh_dist from .transforms import apply_trans from .utils import (get_subjects_dir, _check_subject, logger, verbose, warn, check_random_state, _validate_type, fill_doc, _check_option, check_version) def _blend_colors(color_1, color_2): """Blend two colors in HSV space. Parameters ---------- color_1, color_2 : None | tuple RGBA tuples with values between 0 and 1. None if no color is available. If both colors are None, the output is None. If only one is None, the output is the other color. Returns ------- color : None | tuple RGBA tuple of the combined color. Saturation, value and alpha are averaged, whereas the new hue is determined as angle half way between the two input colors' hues. """ if color_1 is None and color_2 is None: return None elif color_1 is None: return color_2 elif color_2 is None: return color_1 r_1, g_1, b_1, a_1 = color_1 h_1, s_1, v_1 = rgb_to_hsv(r_1, g_1, b_1) r_2, g_2, b_2, a_2 = color_2 h_2, s_2, v_2 = rgb_to_hsv(r_2, g_2, b_2) hue_diff = abs(h_1 - h_2) if hue_diff < 0.5: h = min(h_1, h_2) + hue_diff / 2. else: h = max(h_1, h_2) + (1. - hue_diff) / 2. h %= 1. s = (s_1 + s_2) / 2. v = (v_1 + v_2) / 2. r, g, b = hsv_to_rgb(h, s, v) a = (a_1 + a_2) / 2. 
color = (r, g, b, a) return color def _split_colors(color, n): """Create n colors in HSV space that occupy a gradient in value. Parameters ---------- color : tuple RGBA tuple with values between 0 and 1. n : int >= 2 Number of colors on the gradient. Returns ------- colors : tuple of tuples, len = n N RGBA tuples that occupy a gradient in value (low to high) but share saturation and hue with the input color. """ r, g, b, a = color h, s, v = rgb_to_hsv(r, g, b) gradient_range = np.sqrt(n / 10.) if v > 0.5: v_max = min(0.95, v + gradient_range / 2) v_min = max(0.05, v_max - gradient_range) else: v_min = max(0.05, v - gradient_range / 2) v_max = min(0.95, v_min + gradient_range) hsv_colors = ((h, s, v_) for v_ in np.linspace(v_min, v_max, n)) rgb_colors = (hsv_to_rgb(h_, s_, v_) for h_, s_, v_ in hsv_colors) rgba_colors = ((r_, g_, b_, a,) for r_, g_, b_ in rgb_colors) return tuple(rgba_colors) def _n_colors(n, bytes_=False, cmap='hsv'): """Produce a list of n unique RGBA color tuples based on a colormap. Parameters ---------- n : int Number of colors. bytes : bool Return colors as integers values between 0 and 255 (instead of floats between 0 and 1). cmap : str Which colormap to use. Returns ------- colors : array, shape (n, 4) RGBA color values. """ n_max = 2 ** 10 if n > n_max: raise NotImplementedError("Can't produce more than %i unique " "colors" % n_max) from matplotlib.cm import get_cmap cm = get_cmap(cmap, n_max) pos = np.linspace(0, 1, n, False) colors = cm(pos, bytes=bytes_) if bytes_: # make sure colors are unique for ii, c in enumerate(colors): if np.any(np.all(colors[:ii] == c, 1)): raise RuntimeError('Could not get %d unique colors from %s ' 'colormap. Try using a different colormap.' % (n, cmap)) return colors @fill_doc class Label(object): """A FreeSurfer/MNE label with vertices restricted to one hemisphere. Labels can be combined with the ``+`` operator: * Duplicate vertices are removed. 
        * If duplicate vertices have conflicting position values, an error
          is raised.
        * Values of duplicate vertices are summed.

    Parameters
    ----------
    vertices : array, shape (N,)
        Vertex indices (0 based).
    pos : array, shape (N, 3) | None
        Locations in meters. If None, then zeros are used.
    values : array, shape (N,) | None
        Values at the vertices. If None, then ones are used.
    hemi : 'lh' | 'rh'
        Hemisphere to which the label applies.
    comment : str
        Kept as information but not used by the object itself.
    name : str
        Kept as information but not used by the object itself.
    filename : str
        Kept as information but not used by the object itself.
    subject : str | None
        Name of the subject the label is from.
    color : None | matplotlib color
        Default label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red).
    %(verbose)s

    Attributes
    ----------
    color : None | tuple
        Default label color, represented as RGBA tuple with values between
        0 and 1.
    comment : str
        Comment from the first line of the label file.
    hemi : 'lh' | 'rh'
        Hemisphere.
    name : None | str
        A name for the label. It is OK to change that attribute manually.
    pos : array, shape (N, 3)
        Locations in meters.
    subject : str | None
        Subject name. It is best practice to set this to the proper
        value on initialization, but it can also be set manually.
    values : array, shape (N,)
        Values at the vertices.
    %(verbose)s
    vertices : array, shape (N,)
        Vertex indices (0 based)
    """

    @verbose
    def __init__(self, vertices=(), pos=None, values=None, hemi=None,
                 comment="", name=None, filename=None, subject=None,
                 color=None, verbose=None):  # noqa: D102
        # check parameters
        if not isinstance(hemi, str):
            raise ValueError('hemi must be a string, not %s' % type(hemi))
        vertices = np.asarray(vertices, int)
        # strictly increasing order also rules out duplicate vertices
        if np.any(np.diff(vertices.astype(int)) <= 0):
            raise ValueError('Vertices must be ordered in increasing order.')

        if color is not None:
            # accept any matplotlib color spec, normalize to an RGBA tuple
            from matplotlib.colors import colorConverter
            color = colorConverter.to_rgba(color)

        if values is None:
            values = np.ones(len(vertices))
        else:
            values = np.asarray(values)

        if pos is None:
            pos = np.zeros((len(vertices), 3))
        else:
            pos = np.asarray(pos)

        if not (len(vertices) == len(values) == len(pos)):
            raise ValueError("vertices, values and pos need to have same "
                             "length (number of vertices)")

        # name
        if name is None and filename is not None:
            # derive a default name from the filename, dropping ".label"
            name = op.basename(filename[:-6])
        self.vertices = vertices
        self.pos = pos
        self.values = values
        self.hemi = hemi
        self.comment = comment
        self.verbose = verbose
        self.subject = _check_subject(None, subject, raise_error=False)
        self.color = color
        self.name = name
        self.filename = filename

    def __setstate__(self, state):  # noqa: D105
        # 'subject' and 'color' may be missing from pickles written by
        # older versions, hence .get() with a None default
        self.vertices = state['vertices']
        self.pos = state['pos']
        self.values = state['values']
        self.hemi = state['hemi']
        self.comment = state['comment']
        self.verbose = state['verbose']
        self.subject = state.get('subject', None)
        self.color = state.get('color', None)
        self.name = state['name']
        self.filename = state['filename']

    def __getstate__(self):  # noqa: D105
        out = dict(vertices=self.vertices,
                   pos=self.pos,
                   values=self.values,
                   hemi=self.hemi,
                   comment=self.comment,
                   verbose=self.verbose,
                   subject=self.subject,
                   color=self.color,
                   name=self.name,
                   filename=self.filename)
        return out

    def __repr__(self):  # noqa: D105
        name = 'unknown, ' if self.subject is None else self.subject + ', '
        name += repr(self.name) if
                self.name is not None else "unnamed"
        n_vert = len(self)
        return "<Label | %s, %s : %i vertices>" % (name, self.hemi, n_vert)

    def __len__(self):
        """Return the number of vertices.

        Returns
        -------
        n_vertices : int
            The number of vertices.
        """
        return len(self.vertices)

    def __add__(self, other):
        """Add Labels."""
        _validate_type(other, (Label, BiHemiLabel), 'other')
        if isinstance(other, BiHemiLabel):
            # BiHemiLabel.__add__ knows how to absorb a one-hemisphere Label
            return other + self
        else:  # isinstance(other, Label)
            if self.subject != other.subject:
                raise ValueError('Label subject parameters must match, got '
                                 '"%s" and "%s". Consider setting the '
                                 'subject parameter on initialization, or '
                                 'setting label.subject manually before '
                                 'combining labels.' % (self.subject,
                                                        other.subject))
            if self.hemi != other.hemi:
                # different hemispheres -> the sum is a BiHemiLabel
                name = '%s + %s' % (self.name, other.name)
                if self.hemi == 'lh':
                    lh, rh = self.copy(), other.copy()
                else:
                    lh, rh = other.copy(), self.copy()
                color = _blend_colors(self.color, other.color)
                return BiHemiLabel(lh, rh, name, color)

        # check for overlap
        duplicates = np.intersect1d(self.vertices, other.vertices)
        n_dup = len(duplicates)
        if n_dup:
            self_dup = [np.where(self.vertices == d)[0][0]
                        for d in duplicates]
            other_dup = [np.where(other.vertices == d)[0][0]
                         for d in duplicates]
            # overlapping vertices must agree on position
            if not np.all(self.pos[self_dup] == other.pos[other_dup]):
                err = ("Labels %r and %r: vertices overlap but differ in "
                       "position values" % (self.name, other.name))
                raise ValueError(err)

            isnew = np.array([v not in duplicates for v in other.vertices])

            vertices = np.hstack((self.vertices, other.vertices[isnew]))
            pos = np.vstack((self.pos, other.pos[isnew]))

            # find position of other's vertices in new array
            tgt_idx = [np.where(vertices == v)[0][0]
                       for v in other.vertices]
            n_self = len(self.values)
            n_other = len(other.values)
            new_len = n_self + n_other - n_dup
            values = np.zeros(new_len, dtype=self.values.dtype)
            # values of duplicate vertices are summed (self first, then
            # other's contribution added at the matching indices)
            values[:n_self] += self.values
            values[tgt_idx] += other.values
        else:
            vertices = np.hstack((self.vertices, other.vertices))
            pos = np.vstack((self.pos, other.pos))
            values = np.hstack((self.values, other.values))

        # restore the increasing-vertex-order invariant of Label
        indcs = np.argsort(vertices)
        vertices, pos, values = vertices[indcs], pos[indcs, :], values[indcs]

        comment = "%s + %s" % (self.comment, other.comment)

        name0 = self.name if self.name else 'unnamed'
        name1 = other.name if other.name else 'unnamed'
        name = "%s + %s" % (name0, name1)

        color = _blend_colors(self.color, other.color)
        verbose = self.verbose or other.verbose

        label = Label(vertices, pos, values, self.hemi, comment, name, None,
                      self.subject, color, verbose)
        return label

    def __sub__(self, other):
        """Subtract Labels."""
        _validate_type(other, (Label, BiHemiLabel), 'other')
        if isinstance(other, BiHemiLabel):
            # only the matching hemisphere of the BiHemiLabel can overlap
            if self.hemi == 'lh':
                return self - other.lh
            else:
                return self - other.rh
        else:  # isinstance(other, Label):
            if self.subject != other.subject:
                raise ValueError('Label subject parameters must match, got '
                                 '"%s" and "%s". Consider setting the '
                                 'subject parameter on initialization, or '
                                 'setting label.subject manually before '
                                 'combining labels.' % (self.subject,
                                                        other.subject))

        if self.hemi == other.hemi:
            keep = np.in1d(self.vertices, other.vertices, True,
                           invert=True)
        else:
            # different hemispheres: nothing can be removed
            keep = np.arange(len(self.vertices))

        name = "%s - %s" % (self.name or 'unnamed', other.name or 'unnamed')
        return Label(self.vertices[keep], self.pos[keep], self.values[keep],
                     self.hemi, self.comment, name, None, self.subject,
                     self.color, self.verbose)

    def save(self, filename):
        r"""Write to disk as FreeSurfer \*.label file.

        Parameters
        ----------
        filename : str
            Path to label file to produce.

        Notes
        -----
        Note that due to file specification limitations, the Label's subject
        and color attributes are not saved to disk.
        """
        write_label(filename, self)

    def copy(self):
        """Copy the label instance.

        Returns
        -------
        label : instance of Label
            The copied label.
        """
        return cp.deepcopy(self)

    def fill(self, src, name=None):
        """Fill the surface between sources for a source space label.
        Parameters
        ----------
        src : SourceSpaces
            Source space in which the label was defined. If a source space
            is provided, the label is expanded to fill in surface vertices
            that lie between the vertices included in the source space. For
            the added vertices, ``pos`` is filled in with positions from
            the source space, and ``values`` is filled in from the closest
            source space vertex.
        name : None | str
            Name for the new Label (default is self.name).

        Returns
        -------
        label : Label
            The label covering the same vertices in source space but also
            including intermediate surface vertices.

        See Also
        --------
        Label.restrict
        Label.smooth
        """
        # find source space patch info
        if len(self.vertices) == 0:
            # an empty label stays empty
            return self.copy()
        hemi_src = _get_label_src(self, src)

        if not np.all(np.in1d(self.vertices, hemi_src['vertno'])):
            msg = "Source space does not contain all of the label's vertices"
            raise ValueError(msg)

        if hemi_src['nearest'] is None:
            # patch info ('nearest') is computed lazily; computing it
            # mutates the caller's src, hence the warning
            warn("Source space is being modified in place because patch "
                 "information is needed. To avoid this in the future, run "
                 "mne.add_source_space_distances() on the source space "
                 "and save it to disk.")
            if check_version('scipy', '1.3'):
                dist_limit = 0
            else:
                warn('SciPy < 1.3 detected, adding source space patch '
                     'information will be slower. Consider upgrading SciPy.')
                dist_limit = np.inf
            add_source_space_distances(src, dist_limit=dist_limit)
        nearest = hemi_src['nearest']

        # find new vertices: all surface vertices whose nearest source
        # space vertex belongs to this label
        include = np.in1d(nearest, self.vertices, False)
        vertices = np.nonzero(include)[0]

        # values: copied from the closest source space vertex
        nearest_in_label = np.digitize(nearest[vertices], self.vertices, True)
        values = self.values[nearest_in_label]
        # pos
        pos = hemi_src['rr'][vertices]

        name = self.name if name is None else name
        label = Label(vertices, pos, values, self.hemi, self.comment, name,
                      None, self.subject, self.color)
        return label

    def restrict(self, src, name=None):
        """Restrict a label to a source space.

        Parameters
        ----------
        src : instance of SourceSpaces
            The source spaces to use to restrict the label.
        name : None | str
            Name for the new Label (default is self.name).

        Returns
        -------
        label : instance of Label
            The Label restricted to the set of source space vertices.

        See Also
        --------
        Label.fill

        Notes
        -----
        .. versionadded:: 0.20
        """
        if len(self.vertices) == 0:
            return self.copy()
        hemi_src = _get_label_src(self, src)
        # keep only the label vertices present in the source space
        mask = np.in1d(self.vertices, hemi_src['vertno'])
        name = self.name if name is None else name
        label = Label(self.vertices[mask], self.pos[mask], self.values[mask],
                      self.hemi, self.comment, name, None, self.subject,
                      self.color)
        return label

    @verbose
    def smooth(self, subject=None, smooth=2, grade=None, subjects_dir=None,
               n_jobs=1, verbose=None):
        """Smooth the label.

        Useful for filling in labels made in a decimated source space for
        display.

        Parameters
        ----------
        subject : str | None
            The name of the subject used. If None, the value will be taken
            from self.subject.
        smooth : int
            Number of iterations for the smoothing of the surface data.
            Cannot be None here since not all vertices are used. For a
            grade of 5 (e.g., fsaverage), a smoothing of 2 will fill a
            label.
        grade : int, list of shape (2,), array, or None
            Resolution of the icosahedral mesh (typically 5). If None, all
            vertices will be used (potentially filling the surface). If a
            list, values will be morphed to the set of vertices specified
            in grade[0] and grade[1], assuming that these are vertices for
            the left and right hemispheres. Note that specifying the
            vertices (e.g., grade=[np.arange(10242), np.arange(10242)] for
            fsaverage on a standard grade 5 source space) can be
            substantially faster than computing vertex locations. If one
            array is used, it is assumed that all vertices belong to the
            hemisphere of the label. To create a label filling the
            surface, use None.
        %(subjects_dir)s
        %(n_jobs)s
        %(verbose_meth)s

        Returns
        -------
        label : instance of Label
            The smoothed label.

        Notes
        -----
        This function will set label.pos to be all zeros.
        If the positions on the new surface are required, consider using
        mne.read_surface with ``label.vertices``.
        """
        subject = _check_subject(self.subject, subject)
        # smoothing is implemented as a morph from the subject onto itself
        return self.morph(subject, subject, smooth, grade, subjects_dir,
                          n_jobs, verbose)

    @verbose
    def morph(self, subject_from=None, subject_to=None, smooth=5, grade=None,
              subjects_dir=None, n_jobs=1, verbose=None):
        """Morph the label.

        Useful for transforming a label from one subject to another.

        Parameters
        ----------
        subject_from : str | None
            The name of the subject of the current label. If None, the
            initial subject will be taken from self.subject.
        subject_to : str
            The name of the subject to morph the label to. This will
            be put in label.subject of the output label file.
        smooth : int
            Number of iterations for the smoothing of the surface data.
            Cannot be None here since not all vertices are used.
        grade : int, list of shape (2,), array, or None
            Resolution of the icosahedral mesh (typically 5). If None, all
            vertices will be used (potentially filling the surface). If a
            list, values will be morphed to the set of vertices specified
            in grade[0] and grade[1], assuming that these are vertices for
            the left and right hemispheres. Note that specifying the
            vertices (e.g., ``grade=[np.arange(10242), np.arange(10242)]``
            for fsaverage on a standard grade 5 source space) can be
            substantially faster than computing vertex locations. If one
            array is used, it is assumed that all vertices belong to the
            hemisphere of the label. To create a label filling the
            surface, use None.
        %(subjects_dir)s
        %(n_jobs)s
        %(verbose_meth)s

        Returns
        -------
        label : instance of Label
            The morphed label. Note that the label is also modified in
            place (``self`` is returned).

        See Also
        --------
        mne.morph_labels : Morph a set of labels.

        Notes
        -----
        This function will set label.pos to be all zeros. If the positions
        on the new surface are required, consider using `mne.read_surface`
        with ``label.vertices``.
        """
        from .morph import compute_source_morph, grade_to_vertices
        subject_from = _check_subject(self.subject, subject_from)
        if not isinstance(subject_to, str):
            raise TypeError('"subject_to" must be entered as a string')
        if not isinstance(smooth, int):
            raise TypeError('smooth must be an integer')
        if np.all(self.values == 0):
            raise ValueError('Morphing label with all zero values will result '
                             'in the label having no vertices. Consider using '
                             'something like label.values.fill(1.0).')
        # 0 = left hemisphere, 1 = right hemisphere
        idx = 0 if self.hemi == 'lh' else 1
        if isinstance(grade, np.ndarray):
            # a bare array applies to this label's hemisphere only
            grade_ = [np.array([], int)] * 2
            grade_[idx] = grade
            grade = grade_
            del grade_
        grade = grade_to_vertices(subject_to, grade, subjects_dir=subjects_dir)
        spacing = [np.array([], int)] * 2
        spacing[idx] = grade[idx]
        vertices = [np.array([], int)] * 2
        vertices[idx] = self.vertices
        data = self.values[:, np.newaxis]
        assert len(data) == sum(len(v) for v in vertices)
        # wrap the label values in a single-time-point SourceEstimate so
        # the generic source-morphing machinery can be reused
        stc = SourceEstimate(data, vertices, tmin=1, tstep=1,
                             subject=subject_from)
        stc = compute_source_morph(
            stc, subject_from, subject_to, spacing=spacing, smooth=smooth,
            subjects_dir=subjects_dir, warn=False).apply(stc)
        inds = np.nonzero(stc.data)[0]
        self.values = stc.data[inds, :].ravel()
        self.pos = np.zeros((len(inds), 3))
        self.vertices = stc.vertices[idx][inds]
        self.subject = subject_to
        return self

    @fill_doc
    def split(self, parts=2, subject=None, subjects_dir=None,
              freesurfer=False):
        """Split the Label into two or more parts.

        Parameters
        ----------
        parts : int >= 2 | tuple of str | str
            Number of labels to create (default is 2), or tuple of strings
            specifying label names for new labels (from posterior to
            anterior), or 'contiguous' to split the label into connected
            components. If a number or 'contiguous' is specified, names of
            the new labels will be the input label's name with div1, div2
            etc. appended.
        subject : None | str
            Subject which this label belongs to (needed to locate surface
            file; should only be specified if it is not specified in the
            label).
        %(subjects_dir)s
        freesurfer : bool
            By default (``False``) ``split_label`` uses an algorithm that is
            slightly optimized for performance and numerical precision. Set
            ``freesurfer`` to ``True`` in order to replicate label splits
            from FreeSurfer's ``mris_divide_parcellation``.

        Returns
        -------
        labels : list of Label, shape (n_parts,)
            The labels, starting from the lowest to the highest end of the
            projection axis.

        Notes
        -----
        If using 'contiguous' split, you must ensure that the label being
        split uses the same triangular resolution as the surface mesh files
        in ``subjects_dir``. Also, some small fringe labels may be returned
        that are close (but not connected) to the large components.

        The spatial split finds the label's principal eigen-axis on the
        spherical surface, projects all label vertex coordinates onto this
        axis, and divides them at regular spatial intervals.
        """
        # dispatch to the module-level implementations
        if isinstance(parts, str) and parts == 'contiguous':
            return _split_label_contig(self, subject, subjects_dir)
        elif isinstance(parts, (tuple, int)):
            return split_label(self, parts, subject, subjects_dir, freesurfer)
        else:
            raise ValueError("Need integer, tuple of strings, or string "
                             "('contiguous'). Got %s)" % type(parts))

    def get_vertices_used(self, vertices=None):
        """Get the source space's vertices inside the label.

        Parameters
        ----------
        vertices : ndarray of int, shape (n_vertices,) | None
            The set of vertices to compare the label to. If None, equals to
            ``np.arange(10242)``. Defaults to None.

        Returns
        -------
        label_verts : ndarray of int, shape (n_label_vertices,)
            The vertices of the label corresponding used by the data.
        """
        if vertices is None:
            # default corresponds to a grade-5 icosahedral surface
            vertices = np.arange(10242)

        label_verts = vertices[np.in1d(vertices, self.vertices)]
        return label_verts

    def get_tris(self, tris, vertices=None):
        """Get the source space's triangles inside the label.

        Parameters
        ----------
        tris : ndarray of int, shape (n_tris, 3)
            The set of triangles corresponding to the vertices in a
            source space.
        vertices : ndarray of int, shape (n_vertices,) | None
            The set of vertices to compare the label to. If None, equals to
            ``np.arange(10242)``. Defaults to None.

        Returns
        -------
        label_tris : ndarray of int, shape (n_tris, 3)
            The subset of tris used by the label.
        """
        vertices_ = self.get_vertices_used(vertices)
        # keep only triangles whose three corners are all inside the label
        selection = np.all(np.in1d(tris, vertices_).reshape(tris.shape),
                           axis=1)
        label_tris = tris[selection]
        if len(np.unique(label_tris)) < len(vertices_):
            logger.info('Surprising label structure. Trying to repair '
                        'triangles.')
            dropped_vertices = np.setdiff1d(vertices_, label_tris)
            n_dropped = len(dropped_vertices)
            assert n_dropped == (len(vertices_) - len(np.unique(label_tris)))

            # put missing vertices as extra zero-length triangles
            add_tris = (dropped_vertices +
                        np.zeros((len(dropped_vertices), 3), dtype=int).T)

            label_tris = np.r_[label_tris, add_tris.T]
            assert len(np.unique(label_tris)) == len(vertices_)

        return label_tris

    @fill_doc
    def center_of_mass(self, subject=None, restrict_vertices=False,
                       subjects_dir=None, surf='sphere'):
        """Compute the center of mass of the label.

        This function computes the spatial center of mass on the surface
        as in :footcite:`LarsonLee2013`.

        Parameters
        ----------
        subject : str | None
            The subject the label is defined for.
        restrict_vertices : bool | array of int | instance of SourceSpaces
            If True, returned vertex will be one from the label. Otherwise,
            it could be any vertex from surf. If an array of int, the
            returned vertex will come from that array. If instance of
            SourceSpaces (as of 0.13), the returned vertex will be from
            the given source space. For most accurate estimates, do not
            restrict vertices.
        %(subjects_dir)s
        surf : str
            The surface to use for Euclidean distance center of mass
            finding. The default here is "sphere", which finds the center
            of mass on the spherical surface to help avoid potential issues
            with cortical folding.
        Returns
        -------
        vertex : int
            Vertex of the spatial center of mass for the inferred
            hemisphere, with each vertex weighted by its label value.

        See Also
        --------
        SourceEstimate.center_of_mass
        vertex_to_mni

        Notes
        -----
        .. versionadded:: 0.13

        References
        ----------
        .. footbibliography::
        """
        if not isinstance(surf, str):
            raise TypeError('surf must be a string, got %s' % (type(surf),))
        subject = _check_subject(self.subject, subject)
        if np.any(self.values < 0):
            raise ValueError('Cannot compute COM with negative values')
        if np.all(self.values == 0):
            raise ValueError('Cannot compute COM with all values == 0. For '
                             'structural labels, consider setting to ones via '
                             'label.values[:] = 1.')
        vertex = _center_of_mass(self.vertices, self.values, self.hemi, surf,
                                 subject, subjects_dir, restrict_vertices)
        return vertex


def _get_label_src(label, src):
    """Return the source space entry matching the label's hemisphere."""
    _validate_type(src, SourceSpaces, 'src')
    if src.kind != 'surface':
        raise RuntimeError('Cannot operate on SourceSpaces that are not '
                           'surface type, got %s' % (src.kind,))
    # convention: src[0] is the left hemisphere, src[1] the right
    if label.hemi == 'lh':
        hemi_src = src[0]
    else:
        hemi_src = src[1]
    return hemi_src


class BiHemiLabel(object):
    """A freesurfer/MNE label with vertices in both hemispheres.

    Parameters
    ----------
    lh : Label
        Label for the left hemisphere.
    rh : Label
        Label for the right hemisphere.
    name : None | str
        Name for the label.
    color : None | color
        Label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red).
        Note that due to file specification limitations, the color isn't
        saved to or loaded from files written to disk.

    Attributes
    ----------
    lh : Label
        Label for the left hemisphere.
    rh : Label
        Label for the right hemisphere.
    name : None | str
        A name for the label. It is OK to change that attribute manually.
    subject : str | None
        Subject the label is from.
    """

    def __init__(self, lh, rh, name=None, color=None):  # noqa: D102
        if lh.subject != rh.subject:
            raise ValueError('lh.subject (%s) and rh.subject (%s) must '
                             'agree' % (lh.subject, rh.subject))
        self.lh = lh
        self.rh = rh
        self.name = name
        self.subject = lh.subject
        self.color = color
        self.hemi = 'both'

    def __repr__(self):  # noqa: D105
        temp = "<BiHemiLabel | %s, lh : %i vertices, rh : %i vertices>"
        name = 'unknown, ' if self.subject is None else self.subject + ', '
        name += repr(self.name) if self.name is not None else "unnamed"
        return temp % (name, len(self.lh), len(self.rh))

    def __len__(self):
        """Return the number of vertices.

        Returns
        -------
        n_vertices : int
            The number of vertices.
        """
        return len(self.lh) + len(self.rh)

    def __add__(self, other):
        """Add labels."""
        if isinstance(other, Label):
            # add a single-hemisphere label to the matching side
            if other.hemi == 'lh':
                lh = self.lh + other
                rh = self.rh
            else:
                lh = self.lh
                rh = self.rh + other
        elif isinstance(other, BiHemiLabel):
            lh = self.lh + other.lh
            rh = self.rh + other.rh
        else:
            raise TypeError("Need: Label or BiHemiLabel. Got: %r" % other)

        name = '%s + %s' % (self.name, other.name)
        color = _blend_colors(self.color, other.color)
        return BiHemiLabel(lh, rh, name, color)

    def __sub__(self, other):
        """Subtract labels."""
        _validate_type(other, (Label, BiHemiLabel), 'other')
        if isinstance(other, Label):
            if other.hemi == 'lh':
                lh = self.lh - other
                rh = self.rh
            else:
                rh = self.rh - other
                lh = self.lh
        else:  # isinstance(other, BiHemiLabel)
            lh = self.lh - other.lh
            rh = self.rh - other.rh

        # collapse back to a single-hemisphere Label when one side is empty
        if len(lh.vertices) == 0:
            return rh
        elif len(rh.vertices) == 0:
            return lh
        else:
            name = '%s - %s' % (self.name, other.name)
            return BiHemiLabel(lh, rh, name, self.color)


def read_label(filename, subject=None, color=None):
    """Read FreeSurfer Label file.

    Parameters
    ----------
    filename : str
        Path to label file.
    subject : str | None
        Name of the subject the data are defined for.
It is good practice to set this attribute to avoid combining incompatible labels and SourceEstimates (e.g., ones from other subjects). Note that due to file specification limitations, the subject name isn't saved to or loaded from files written to disk. color : None | matplotlib color Default label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red). Note that due to file specification limitations, the color isn't saved to or loaded from files written to disk. Returns ------- label : Label Instance of Label object with attributes: - ``comment``: comment from the first line of the label file - ``vertices``: vertex indices (0 based, column 1) - ``pos``: locations in meters (columns 2 - 4 divided by 1000) - ``values``: values at the vertices (column 5) See Also -------- read_labels_from_annot write_labels_to_annot """ if subject is not None and not isinstance(subject, str): raise TypeError('subject must be a string') # find hemi basename = op.basename(filename) if basename.endswith('lh.label') or basename.startswith('lh.'): hemi = 'lh' elif basename.endswith('rh.label') or basename.startswith('rh.'): hemi = 'rh' else: raise ValueError('Cannot find which hemisphere it is. 
File should end' ' with lh.label or rh.label: %s' % (basename,)) # find name if basename.startswith(('lh.', 'rh.')): basename_ = basename[3:] if basename.endswith('.label'): basename_ = basename[:-6] else: basename_ = basename[:-9] name = "%s-%s" % (basename_, hemi) # read the file with open(filename, 'r') as fid: comment = fid.readline().replace('\n', '')[1:] nv = int(fid.readline()) data = np.empty((5, nv)) for i, line in enumerate(fid): data[:, i] = line.split() # let's make sure everything is ordered correctly vertices = np.array(data[0], dtype=np.int32) pos = 1e-3 * data[1:4].T values = data[4] order = np.argsort(vertices) vertices = vertices[order] pos = pos[order] values = values[order] label = Label(vertices, pos, values, hemi, comment, name, filename, subject, color) return label @verbose def write_label(filename, label, verbose=None): """Write a FreeSurfer label. Parameters ---------- filename : str Path to label file to produce. label : Label The label object to save. %(verbose)s See Also -------- write_labels_to_annot Notes ----- Note that due to file specification limitations, the Label's subject and color attributes are not saved to disk. 
""" hemi = label.hemi path_head, name = op.split(filename) if name.endswith('.label'): name = name[:-6] if not (name.startswith(hemi) or name.endswith(hemi)): name += '-' + hemi filename = op.join(path_head, name) + '.label' logger.info('Saving label to : %s' % filename) with open(filename, 'wb') as fid: n_vertices = len(label.vertices) data = np.zeros((n_vertices, 5), dtype=np.float64) data[:, 0] = label.vertices data[:, 1:4] = 1e3 * label.pos data[:, 4] = label.values fid.write(b'#%s\n' % label.comment.encode()) fid.write(b'%d\n' % n_vertices) for d in data: fid.write(b'%d %f %f %f %f\n' % tuple(d)) def _prep_label_split(label, subject=None, subjects_dir=None): """Get label and subject information prior to label splitting.""" # If necessary, find the label if isinstance(label, BiHemiLabel): raise TypeError("Can only split labels restricted to one hemisphere.") elif isinstance(label, str): label = read_label(label) # Find the subject subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if label.subject is None and subject is None: raise ValueError("The subject needs to be specified.") elif subject is None: subject = label.subject elif label.subject is None: pass elif subject != label.subject: raise ValueError("The label specifies a different subject (%r) from " "the subject parameter (%r)." % label.subject, subject) return label, subject, subjects_dir def _split_label_contig(label_to_split, subject=None, subjects_dir=None): """Split label into contiguous regions (i.e., connected components). Parameters ---------- label_to_split : Label | str Label which is to be split (Label object or path to a label file). subject : None | str Subject which this label belongs to (needed to locate surface file; should only be specified if it is not specified in the label). %(subjects_dir)s Returns ------- labels : list of Label The contiguous labels, in order of descending size. 
""" # Convert to correct input if necessary label_to_split, subject, subjects_dir = _prep_label_split(label_to_split, subject, subjects_dir) # Find the spherical surface to get vertices and tris surf_fname = '.'.join((label_to_split.hemi, 'sphere')) surf_path = op.join(subjects_dir, subject, 'surf', surf_fname) surface_points, surface_tris = read_surface(surf_path) # Get vertices we want to keep and compute mesh edges verts_arr = label_to_split.vertices edges_all = mesh_edges(surface_tris) # Subselect rows and cols of vertices that belong to the label select_edges = edges_all[verts_arr][:, verts_arr].tocoo() # Compute connected components and store as lists of vertex numbers comp_labels = _get_components(verts_arr, select_edges) # Convert to indices in the original surface space label_divs = [] for comp in comp_labels: label_divs.append(verts_arr[comp]) # Construct label division names n_parts = len(label_divs) if label_to_split.name.endswith(('lh', 'rh')): basename = label_to_split.name[:-3] name_ext = label_to_split.name[-3:] else: basename = label_to_split.name name_ext = '' name_pattern = "%s_div%%i%s" % (basename, name_ext) names = tuple(name_pattern % i for i in range(1, n_parts + 1)) # Colors if label_to_split.color is None: colors = (None,) * n_parts else: colors = _split_colors(label_to_split.color, n_parts) # Sort label divisions by their size (in vertices) label_divs.sort(key=lambda x: len(x), reverse=True) labels = [] for div, name, color in zip(label_divs, names, colors): # Get indices of dipoles within this division of the label verts = np.array(sorted(list(div)), int) vert_indices = np.in1d(verts_arr, verts, assume_unique=True) # Set label attributes pos = label_to_split.pos[vert_indices] values = label_to_split.values[vert_indices] hemi = label_to_split.hemi comment = label_to_split.comment lbl = Label(verts, pos, values, hemi, comment, name, None, subject, color) labels.append(lbl) return labels @fill_doc def split_label(label, parts=2, 
subject=None, subjects_dir=None, freesurfer=False): """Split a Label into two or more parts. Parameters ---------- label : Label | str Label which is to be split (Label object or path to a label file). parts : int >= 2 | tuple of str A sequence of strings specifying label names for the new labels (from posterior to anterior), or the number of new labels to create (default is 2). If a number is specified, names of the new labels will be the input label's name with div1, div2 etc. appended. subject : None | str Subject which this label belongs to (needed to locate surface file; should only be specified if it is not specified in the label). %(subjects_dir)s freesurfer : bool By default (``False``) ``split_label`` uses an algorithm that is slightly optimized for performance and numerical precision. Set ``freesurfer`` to ``True`` in order to replicate label splits from FreeSurfer's ``mris_divide_parcellation``. Returns ------- labels : list of Label, shape (n_parts,) The labels, starting from the lowest to the highest end of the projection axis. Notes ----- Works by finding the label's principal eigen-axis on the spherical surface, projecting all label vertex coordinates onto this axis and dividing them at regular spatial intervals. 
""" from scipy import linalg label, subject, subjects_dir = _prep_label_split(label, subject, subjects_dir) # find the parts if np.isscalar(parts): n_parts = int(parts) if label.name.endswith(('lh', 'rh')): basename = label.name[:-3] name_ext = label.name[-3:] else: basename = label.name name_ext = '' name_pattern = "%s_div%%i%s" % (basename, name_ext) names = tuple(name_pattern % i for i in range(1, n_parts + 1)) else: names = parts n_parts = len(names) if n_parts < 2: raise ValueError("Can't split label into %i parts" % n_parts) # find the spherical surface surf_fname = '.'.join((label.hemi, 'sphere')) surf_path = op.join(subjects_dir, subject, "surf", surf_fname) surface_points, surface_tris = read_surface(surf_path) # find the label coordinates on the surface points = surface_points[label.vertices] center = np.mean(points, axis=0) centered_points = points - center # find the label's normal if freesurfer: # find the Freesurfer vertex closest to the center distance = np.sqrt(np.sum(centered_points ** 2, axis=1)) i_closest = np.argmin(distance) closest_vertex = label.vertices[i_closest] # find the normal according to freesurfer convention idx = np.any(surface_tris == closest_vertex, axis=1) tris_for_normal = surface_tris[idx] r1 = surface_points[tris_for_normal[:, 0], :] r2 = surface_points[tris_for_normal[:, 1], :] r3 = surface_points[tris_for_normal[:, 2], :] tri_normals = fast_cross_3d((r2 - r1), (r3 - r1)) normal = np.mean(tri_normals, axis=0) normal /= linalg.norm(normal) else: # Normal of the center normal = center / linalg.norm(center) # project all vertex coordinates on the tangential plane for this point q, _ = linalg.qr(normal[:, np.newaxis]) tangent_u = q[:, 1:] m_obs = np.dot(centered_points, tangent_u) # find principal eigendirection m_cov = np.dot(m_obs.T, m_obs) w, vr = linalg.eig(m_cov) i = np.argmax(w) eigendir = vr[:, i] # project back into 3d space axis = np.dot(tangent_u, eigendir) # orient them from posterior to anterior if axis[1] < 0: axis 
*= -1 # project the label on the axis proj = np.dot(points, axis) # assign mark (new label index) proj -= proj.min() proj /= (proj.max() / n_parts) mark = proj // 1 mark[mark == n_parts] = n_parts - 1 # colors if label.color is None: colors = (None,) * n_parts else: colors = _split_colors(label.color, n_parts) # construct new labels labels = [] for i, name, color in zip(range(n_parts), names, colors): idx = (mark == i) vert = label.vertices[idx] pos = label.pos[idx] values = label.values[idx] hemi = label.hemi comment = label.comment lbl = Label(vert, pos, values, hemi, comment, name, None, subject, color) labels.append(lbl) return labels def label_sign_flip(label, src): """Compute sign for label averaging. Parameters ---------- label : Label | BiHemiLabel A label. src : SourceSpaces The source space over which the label is defined. Returns ------- flip : array Sign flip vector (contains 1 or -1). """ from scipy import linalg if len(src) != 2: raise ValueError('Only source spaces with 2 hemisphers are accepted') lh_vertno = src[0]['vertno'] rh_vertno = src[1]['vertno'] # get source orientations ori = list() if label.hemi in ('lh', 'both'): vertices = label.vertices if label.hemi == 'lh' else label.lh.vertices vertno_sel = np.intersect1d(lh_vertno, vertices) ori.append(src[0]['nn'][vertno_sel]) if label.hemi in ('rh', 'both'): vertices = label.vertices if label.hemi == 'rh' else label.rh.vertices vertno_sel = np.intersect1d(rh_vertno, vertices) ori.append(src[1]['nn'][vertno_sel]) if len(ori) == 0: raise Exception('Unknown hemisphere type "%s"' % (label.hemi,)) ori = np.concatenate(ori, axis=0) if len(ori) == 0: return np.array([], int) _, _, Vh = linalg.svd(ori, full_matrices=False) # The sign of Vh is ambiguous, so we should align to the max-positive # (outward) direction dots = np.dot(ori, Vh[0]) if np.mean(dots) < 0: dots *= -1 # Comparing to the direction of the first right singular vector flip = np.sign(dots) return flip @verbose def stc_to_label(stc, 
src=None, smooth=True, connected=False, subjects_dir=None, verbose=None): """Compute a label from the non-zero sources in an stc object. Parameters ---------- stc : SourceEstimate The source estimates. src : SourceSpaces | str | None The source space over which the source estimates are defined. If it's a string it should the subject name (e.g. fsaverage). Can be None if stc.subject is not None. smooth : bool Fill in vertices on the cortical surface that are not in the source space based on the closest source space vertex (requires src to be a SourceSpace). connected : bool If True a list of connected labels will be returned in each hemisphere. The labels are ordered in decreasing order depending of the maximum value in the stc. %(subjects_dir)s %(verbose)s Returns ------- labels : list of Label | list of list of Label The generated labels. If connected is False, it returns a list of Labels (one per hemisphere). If no Label is available in a hemisphere, None is returned. If connected is True, it returns for each hemisphere a list of connected labels ordered in decreasing order depending of the maximum value in the stc. If no Label is available in an hemisphere, an empty list is returned. """ if not isinstance(smooth, bool): raise ValueError('smooth should be True or False. Got %s.' 
% smooth) src = stc.subject if src is None else src if src is None: raise ValueError('src cannot be None if stc.subject is None') if isinstance(src, str): subject = src else: subject = stc.subject if not isinstance(stc, SourceEstimate): raise ValueError('SourceEstimate should be surface source estimates') if isinstance(src, str): if connected: raise ValueError('The option to return only connected labels is ' 'only available if source spaces are provided.') if smooth: msg = ("stc_to_label with smooth=True requires src to be an " "instance of SourceSpace") raise ValueError(msg) subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) surf_path_from = op.join(subjects_dir, src, 'surf') rr_lh, tris_lh = read_surface(op.join(surf_path_from, 'lh.white')) rr_rh, tris_rh = read_surface(op.join(surf_path_from, 'rh.white')) rr = [rr_lh, rr_rh] tris = [tris_lh, tris_rh] else: if not isinstance(src, SourceSpaces): raise TypeError('src must be a string or a set of source spaces') if len(src) != 2: raise ValueError('source space should contain the 2 hemispheres') rr = [1e3 * src[0]['rr'], 1e3 * src[1]['rr']] tris = [src[0]['tris'], src[1]['tris']] src_conn = spatial_src_adjacency(src).tocsr() labels = [] cnt = 0 cnt_full = 0 for hemi_idx, (hemi, this_vertno, this_tris, this_rr) in enumerate( zip(['lh', 'rh'], stc.vertices, tris, rr)): this_data = stc.data[cnt:cnt + len(this_vertno)] if connected: # we know src *must* be a SourceSpaces now vertno = np.where(src[hemi_idx]['inuse'])[0] if not len(np.setdiff1d(this_vertno, vertno)) == 0: raise RuntimeError('stc contains vertices not present ' 'in source space, did you morph?') tmp = np.zeros((len(vertno), this_data.shape[1])) this_vertno_idx = np.searchsorted(vertno, this_vertno) tmp[this_vertno_idx] = this_data this_data = tmp offset = cnt_full + len(this_data) this_src_adj = src_conn[cnt_full:offset, cnt_full:offset].tocoo() this_data_abs_max = np.abs(this_data).max(axis=1) clusters, _ = _find_clusters(this_data_abs_max, 
0., adjacency=this_src_adj) cnt_full += len(this_data) # Then order clusters in descending order based on maximum value clusters_max = np.argsort([np.max(this_data_abs_max[c]) for c in clusters])[::-1] clusters = [clusters[k] for k in clusters_max] clusters = [vertno[c] for c in clusters] else: clusters = [this_vertno[np.any(this_data, axis=1)]] cnt += len(this_vertno) clusters = [c for c in clusters if len(c) > 0] if len(clusters) == 0: if not connected: this_labels = None else: this_labels = [] else: this_labels = [] colors = _n_colors(len(clusters)) for c, color in zip(clusters, colors): idx_use = c label = Label(idx_use, this_rr[idx_use], None, hemi, 'Label from stc', subject=subject, color=color) if smooth: label = label.fill(src) this_labels.append(label) if not connected: this_labels = this_labels[0] labels.append(this_labels) return labels def _verts_within_dist(graph, sources, max_dist): """Find all vertices wihin a maximum geodesic distance from source. Parameters ---------- graph : scipy.sparse.csr_matrix Sparse matrix with distances between adjacent vertices. sources : list of int Source vertices. max_dist : float Maximum geodesic distance. Returns ------- verts : array Vertices within max_dist. dist : array Distances from source vertex. 
""" dist_map = {} verts_added_last = [] for source in sources: dist_map[source] = 0 verts_added_last.append(source) # add neighbors until no more neighbors within max_dist can be found while len(verts_added_last) > 0: verts_added = [] for i in verts_added_last: v_dist = dist_map[i] row = graph[i, :] neighbor_vert = row.indices neighbor_dist = row.data for j, d in zip(neighbor_vert, neighbor_dist): n_dist = v_dist + d if j in dist_map: if n_dist < dist_map[j]: dist_map[j] = n_dist else: if n_dist <= max_dist: dist_map[j] = n_dist # we found a new vertex within max_dist verts_added.append(j) verts_added_last = verts_added verts = np.sort(np.array(list(dist_map.keys()), int)) dist = np.array([dist_map[v] for v in verts], int) return verts, dist def _grow_labels(seeds, extents, hemis, names, dist, vert, subject): """Parallelize grow_labels.""" labels = [] for seed, extent, hemi, name in zip(seeds, extents, hemis, names): label_verts, label_dist = _verts_within_dist(dist[hemi], seed, extent) # create a label if len(seed) == 1: seed_repr = str(seed) else: seed_repr = ','.join(map(str, seed)) comment = 'Circular label: seed=%s, extent=%0.1fmm' % (seed_repr, extent) label = Label(vertices=label_verts, pos=vert[hemi][label_verts], values=label_dist, hemi=hemi, comment=comment, name=str(name), subject=subject) labels.append(label) return labels @fill_doc def grow_labels(subject, seeds, extents, hemis, subjects_dir=None, n_jobs=1, overlap=True, names=None, surface='white', colors=None): """Generate circular labels in source space with region growing. This function generates a number of labels in source space by growing regions starting from the vertices defined in "seeds". For each seed, a label is generated containing all vertices within a maximum geodesic distance on the white matter surface from the seed. Parameters ---------- subject : str Name of the subject as in SUBJECTS_DIR. seeds : int | list Seed, or list of seeds. 
Each seed can be either a vertex number or a list of vertex numbers. extents : array | float Extents (radius in mm) of the labels. hemis : array | int Hemispheres to use for the labels (0: left, 1: right). %(subjects_dir)s %(n_jobs)s Likely only useful if tens or hundreds of labels are being expanded simultaneously. Does not apply with ``overlap=False``. overlap : bool Produce overlapping labels. If True (default), the resulting labels can be overlapping. If False, each label will be grown one step at a time, and occupied territory will not be invaded. names : None | list of str Assign names to the new labels (list needs to have the same length as seeds). surface : str The surface used to grow the labels, defaults to the white surface. colors : array, shape (n, 4) or (, 4) | None How to assign colors to each label. If None then unique colors will be chosen automatically (default), otherwise colors will be broadcast from the array. The first three values will be interpreted as RGB colors and the fourth column as the alpha value (commonly 1). Returns ------- labels : list of Label The labels' ``comment`` attribute contains information on the seed vertex and extent; the ``values`` attribute contains distance from the seed in millimeters. Notes ----- "extents" and "hemis" can either be arrays with the same length as seeds, which allows using a different extent and hemisphere for label, or integers, in which case the same extent and hemisphere is used for each label. 
""" subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) n_jobs = check_n_jobs(n_jobs) # make sure the inputs are arrays if np.isscalar(seeds): seeds = [seeds] seeds = [np.atleast_1d(seed) for seed in seeds] extents = np.atleast_1d(extents) hemis = np.atleast_1d(hemis) n_seeds = len(seeds) if len(extents) != 1 and len(extents) != n_seeds: raise ValueError('The extents parameter has to be of length 1 or ' 'len(seeds)') if len(hemis) != 1 and len(hemis) != n_seeds: raise ValueError('The hemis parameter has to be of length 1 or ' 'len(seeds)') if colors is not None: if len(colors.shape) == 1: # if one color for all seeds n_colors = 1 n = colors.shape[0] else: n_colors, n = colors.shape if n_colors != n_seeds and n_colors != 1: msg = ('Number of colors (%d) and seeds (%d) are not compatible.' % (n_colors, n_seeds)) raise ValueError(msg) if n != 4: msg = 'Colors must have 4 values (RGB and alpha), not %d.' % n raise ValueError(msg) # make the arrays the same length as seeds if len(extents) == 1: extents = np.tile(extents, n_seeds) if len(hemis) == 1: hemis = np.tile(hemis, n_seeds) hemis = np.array(['lh' if h == 0 else 'rh' for h in hemis]) # names if names is None: names = ["Label_%i-%s" % items for items in enumerate(hemis)] else: if np.isscalar(names): names = [names] if len(names) != n_seeds: raise ValueError('The names parameter has to be None or have ' 'length len(seeds)') for i, hemi in enumerate(hemis): if not names[i].endswith(hemi): names[i] = '-'.join((names[i], hemi)) names = np.array(names) # load the surfaces and create the distance graphs tris, vert, dist = {}, {}, {} for hemi in set(hemis): surf_fname = op.join(subjects_dir, subject, 'surf', hemi + '.' 
+ surface) vert[hemi], tris[hemi] = read_surface(surf_fname) dist[hemi] = mesh_dist(tris[hemi], vert[hemi]) if overlap: # create the patches parallel, my_grow_labels, _ = parallel_func(_grow_labels, n_jobs) seeds = np.array_split(np.array(seeds, dtype='O'), n_jobs) extents = np.array_split(extents, n_jobs) hemis = np.array_split(hemis, n_jobs) names = np.array_split(names, n_jobs) labels = sum(parallel(my_grow_labels(s, e, h, n, dist, vert, subject) for s, e, h, n in zip(seeds, extents, hemis, names)), []) else: # special procedure for non-overlapping labels labels = _grow_nonoverlapping_labels(subject, seeds, extents, hemis, vert, dist, names) if colors is None: # add a unique color to each label label_colors = _n_colors(len(labels)) else: # use specified colors label_colors = np.empty((len(labels), 4)) label_colors[:] = colors for label, color in zip(labels, label_colors): label.color = color return labels def _grow_nonoverlapping_labels(subject, seeds_, extents_, hemis, vertices_, graphs, names_): """Grow labels while ensuring that they don't overlap.""" labels = [] for hemi in set(hemis): hemi_index = (hemis == hemi) seeds = [seed for seed, h in zip(seeds_, hemis) if h == hemi] extents = extents_[hemi_index] names = names_[hemi_index] graph = graphs[hemi] # distance graph n_vertices = len(vertices_[hemi]) n_labels = len(seeds) # prepare parcellation parc = np.empty(n_vertices, dtype='int32') parc[:] = -1 # initialize active sources sources = {} # vert -> (label, dist_from_seed) edge = [] # queue of vertices to process for label, seed in enumerate(seeds): if np.any(parc[seed] >= 0): raise ValueError("Overlapping seeds") parc[seed] = label for s in np.atleast_1d(seed): sources[s] = (label, 0.) 
edge.append(s) # grow from sources while edge: vert_from = edge.pop(0) label, old_dist = sources[vert_from] # add neighbors within allowable distance row = graph[vert_from, :] for vert_to, dist in zip(row.indices, row.data): # Prevent adding a point that has already been used # (prevents infinite loop) if (vert_to == seeds[label]).any(): continue new_dist = old_dist + dist # abort if outside of extent if new_dist > extents[label]: continue vert_to_label = parc[vert_to] if vert_to_label >= 0: _, vert_to_dist = sources[vert_to] # abort if the vertex is occupied by a closer seed if new_dist > vert_to_dist: continue elif vert_to in edge: edge.remove(vert_to) # assign label value parc[vert_to] = label sources[vert_to] = (label, new_dist) edge.append(vert_to) # convert parc to labels for i in range(n_labels): vertices = np.nonzero(parc == i)[0] name = str(names[i]) label_ = Label(vertices, hemi=hemi, name=name, subject=subject) labels.append(label_) return labels @fill_doc def random_parcellation(subject, n_parcel, hemi, subjects_dir=None, surface='white', random_state=None): """Generate random cortex parcellation by growing labels. This function generates a number of labels which don't intersect and cover the whole surface. Regions are growing around randomly chosen seeds. Parameters ---------- subject : str Name of the subject as in SUBJECTS_DIR. n_parcel : int Total number of cortical parcels. hemi : str Hemisphere id (ie 'lh', 'rh', 'both'). In the case of 'both', both hemispheres are processed with (n_parcel // 2) parcels per hemisphere. %(subjects_dir)s surface : str The surface used to grow the labels, defaults to the white surface. %(random_state)s Returns ------- labels : list of Label Random cortex parcellation. 
""" subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if hemi == 'both': hemi = ['lh', 'rh'] hemis = np.atleast_1d(hemi) # load the surfaces and create the distance graphs tris, vert, dist = {}, {}, {} for hemi in set(hemis): surf_fname = op.join(subjects_dir, subject, 'surf', hemi + '.' + surface) vert[hemi], tris[hemi] = read_surface(surf_fname) dist[hemi] = mesh_dist(tris[hemi], vert[hemi]) # create the patches labels = _cortex_parcellation(subject, n_parcel, hemis, vert, dist, random_state) # add a unique color to each label colors = _n_colors(len(labels)) for label, color in zip(labels, colors): label.color = color return labels def _cortex_parcellation(subject, n_parcel, hemis, vertices_, graphs, random_state=None): """Random cortex parcellation.""" labels = [] rng = check_random_state(random_state) for hemi in set(hemis): parcel_size = len(hemis) * len(vertices_[hemi]) // n_parcel graph = graphs[hemi] # distance graph n_vertices = len(vertices_[hemi]) # prepare parcellation parc = np.full(n_vertices, -1, dtype='int32') # initialize active sources s = rng.choice(range(n_vertices)) label_idx = 0 edge = [s] # queue of vertices to process parc[s] = label_idx label_size = 1 rest = len(parc) - 1 # grow from sources while rest: # if there are not free neighbors, start new parcel if not edge: rest_idx = np.where(parc < 0)[0] s = rng.choice(rest_idx) edge = [s] label_idx += 1 label_size = 1 parc[s] = label_idx rest -= 1 vert_from = edge.pop(0) # add neighbors within allowable distance # row = graph[vert_from, :] # row_indices, row_data = row.indices, row.data sl = slice(graph.indptr[vert_from], graph.indptr[vert_from + 1]) row_indices, row_data = graph.indices[sl], graph.data[sl] for vert_to, dist in zip(row_indices, row_data): vert_to_label = parc[vert_to] # abort if the vertex is already occupied if vert_to_label >= 0: continue # abort if outside of extent if label_size > parcel_size: label_idx += 1 label_size = 1 edge = [vert_to] parc[vert_to] = 
label_idx rest -= 1 break # assign label value parc[vert_to] = label_idx label_size += 1 edge.append(vert_to) rest -= 1 # merging small labels # label adjacency matrix n_labels = label_idx + 1 label_sizes = np.empty(n_labels, dtype=int) label_conn = np.zeros([n_labels, n_labels], dtype='bool') for i in range(n_labels): vertices = np.nonzero(parc == i)[0] label_sizes[i] = len(vertices) neighbor_vertices = graph[vertices, :].indices neighbor_labels = np.unique(np.array(parc[neighbor_vertices])) label_conn[i, neighbor_labels] = 1 np.fill_diagonal(label_conn, 0) # merging label_id = range(n_labels) while n_labels > n_parcel // len(hemis): # smallest label and its smallest neighbor i = np.argmin(label_sizes) neighbors = np.nonzero(label_conn[i, :])[0] j = neighbors[np.argmin(label_sizes[neighbors])] # merging two labels label_conn[j, :] += label_conn[i, :] label_conn[:, j] += label_conn[:, i] label_conn = np.delete(label_conn, i, 0) label_conn = np.delete(label_conn, i, 1) label_conn[j, j] = 0 label_sizes[j] += label_sizes[i] label_sizes = np.delete(label_sizes, i, 0) n_labels -= 1 vertices = np.nonzero(parc == label_id[i])[0] parc[vertices] = label_id[j] label_id = np.delete(label_id, i, 0) # convert parc to labels for i in range(n_labels): vertices = np.nonzero(parc == label_id[i])[0] name = 'label_' + str(i) label_ = Label(vertices, hemi=hemi, name=name, subject=subject) labels.append(label_) return labels def _read_annot_cands(dir_name, raise_error=True): """List the candidate parcellations.""" if not op.isdir(dir_name): if not raise_error: return list() raise IOError('Directory for annotation does not exist: %s', dir_name) cands = os.listdir(dir_name) cands = sorted(set(c.replace('lh.', '').replace('rh.', '').replace( '.annot', '') for c in cands if '.annot' in c), key=lambda x: x.lower()) # exclude .ctab files cands = [c for c in cands if '.ctab' not in c] return cands def _read_annot(fname): """Read a Freesurfer annotation from a .annot file. 
Note : Copied from PySurfer Parameters ---------- fname : str Path to annotation file Returns ------- annot : numpy array, shape=(n_verts) Annotation id at each vertex ctab : numpy array, shape=(n_entries, 5) RGBA + label id colortable array names : list of str List of region names as stored in the annot file """ if not op.isfile(fname): dir_name = op.split(fname)[0] cands = _read_annot_cands(dir_name) if len(cands) == 0: raise IOError('No such file %s, no candidate parcellations ' 'found in directory' % fname) else: raise IOError('No such file %s, candidate parcellations in ' 'that directory:\n%s' % (fname, '\n'.join(cands))) with open(fname, "rb") as fid: n_verts = np.fromfile(fid, '>i4', 1)[0] data = np.fromfile(fid, '>i4', n_verts * 2).reshape(n_verts, 2) annot = data[data[:, 0], 1] ctab_exists = np.fromfile(fid, '>i4', 1)[0] if not ctab_exists: raise Exception('Color table not found in annotation file') n_entries = np.fromfile(fid, '>i4', 1)[0] if n_entries > 0: length = np.fromfile(fid, '>i4', 1)[0] np.fromfile(fid, '>c', length) # discard orig_tab names = list() ctab = np.zeros((n_entries, 5), np.int64) for i in range(n_entries): name_length = np.fromfile(fid, '>i4', 1)[0] name = np.fromfile(fid, "|S%d" % name_length, 1)[0] names.append(name) ctab[i, :4] = np.fromfile(fid, '>i4', 4) ctab[i, 4] = (ctab[i, 0] + ctab[i, 1] * (2 ** 8) + ctab[i, 2] * (2 ** 16) + ctab[i, 3] * (2 ** 24)) else: ctab_version = -n_entries if ctab_version != 2: raise Exception('Color table version not supported') n_entries = np.fromfile(fid, '>i4', 1)[0] ctab = np.zeros((n_entries, 5), np.int64) length = np.fromfile(fid, '>i4', 1)[0] np.fromfile(fid, "|S%d" % length, 1) # Orig table path entries_to_read = np.fromfile(fid, '>i4', 1)[0] names = list() for i in range(entries_to_read): np.fromfile(fid, '>i4', 1) # Structure name_length = np.fromfile(fid, '>i4', 1)[0] name = np.fromfile(fid, "|S%d" % name_length, 1)[0] names.append(name) ctab[i, :4] = np.fromfile(fid, '>i4', 4) ctab[i, 4] = 
(ctab[i, 0] + ctab[i, 1] * (2 ** 8) + ctab[i, 2] * (2 ** 16)) # convert to more common alpha value ctab[:, 3] = 255 - ctab[:, 3] return annot, ctab, names def _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir): """Get the .annot filenames and hemispheres.""" if annot_fname is not None: # we use use the .annot file specified by the user hemis = [op.basename(annot_fname)[:2]] if hemis[0] not in ['lh', 'rh']: raise ValueError('Could not determine hemisphere from filename, ' 'filename has to start with "lh" or "rh".') annot_fname = [annot_fname] else: # construct .annot file names for requested subject, parc, hemi _check_option('hemi', hemi, ['lh', 'rh', 'both']) if hemi == 'both': hemis = ['lh', 'rh'] else: hemis = [hemi] subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) dst = op.join(subjects_dir, subject, 'label', '%%s.%s.annot' % parc) annot_fname = [dst % hemi_ for hemi_ in hemis] return annot_fname, hemis def _load_vert_pos(subject, subjects_dir, surf_name, hemi, n_expected, extra=''): fname_surf = op.join(subjects_dir, subject, 'surf', '%s.%s' % (hemi, surf_name)) vert_pos, _ = read_surface(fname_surf) vert_pos /= 1e3 # the positions in labels are in meters if len(vert_pos) != n_expected: raise RuntimeError('Number of surface vertices (%s) for subject %s' ' does not match the expected number of vertices' '(%s)%s' % (len(vert_pos), subject, n_expected, extra)) return vert_pos @verbose def read_labels_from_annot(subject, parc='aparc', hemi='both', surf_name='white', annot_fname=None, regexp=None, subjects_dir=None, sort=True, verbose=None): """Read labels from a FreeSurfer annotation file. Note: Only cortical labels will be returned. Parameters ---------- subject : str The subject for which to read the parcellation. parc : str The parcellation to use, e.g., 'aparc' or 'aparc.a2009s'. hemi : str The hemisphere from which to read the parcellation, can be 'lh', 'rh', or 'both'. 
surf_name : str Surface used to obtain vertex locations, e.g., 'white', 'pial'. annot_fname : str or None Filename of the .annot file. If not None, only this file is read and 'parc' and 'hemi' are ignored. regexp : str Regular expression or substring to select particular labels from the parcellation. E.g. 'superior' will return all labels in which this substring is contained. %(subjects_dir)s sort : bool If true, labels will be sorted by name before being returned. .. versionadded:: 0.21.0 %(verbose)s Returns ------- labels : list of Label The labels, sorted by label name (ascending). See Also -------- write_labels_to_annot morph_labels """ logger.info('Reading labels from parcellation...') subjects_dir = get_subjects_dir(subjects_dir) # get the .annot filenames and hemispheres annot_fname, hemis = _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir) if regexp is not None: # allow for convenient substring match r_ = (re.compile('.*%s.*' % regexp if regexp.replace('_', '').isalnum() else regexp)) # now we are ready to create the labels n_read = 0 labels = list() orig_names = set() for fname, hemi in zip(annot_fname, hemis): # read annotation annot, ctab, label_names = _read_annot(fname) label_rgbas = ctab[:, :4] / 255. 
label_ids = ctab[:, -1] # load the vertex positions from surface vert_pos = _load_vert_pos( subject, subjects_dir, surf_name, hemi, len(annot), extra='for annotation file %s' % fname) for label_id, label_name, label_rgba in\ zip(label_ids, label_names, label_rgbas): vertices = np.where(annot == label_id)[0] if len(vertices) == 0: # label is not part of cortical surface continue label_name = label_name.decode() orig_names.add(label_name) name = f'{label_name}-{hemi}' if (regexp is not None) and not r_.match(name): continue pos = vert_pos[vertices, :] label = Label(vertices, pos, hemi=hemi, name=name, subject=subject, color=tuple(label_rgba)) labels.append(label) n_read = len(labels) - n_read logger.info(' read %d labels from %s' % (n_read, fname)) # sort the labels by label name if sort: labels = sorted(labels, key=lambda l: l.name) if len(labels) == 0: msg = 'No labels found.' if regexp is not None: orig_names = '\n'.join(sorted(orig_names)) msg += (f' Maybe the regular expression {repr(regexp)} did not ' f'match any of:\n{orig_names}') raise RuntimeError(msg) return labels def _check_labels_subject(labels, subject, name): _validate_type(labels, (list, tuple), 'labels') for label in labels: _validate_type(label, Label, 'each entry in labels') if subject is None: subject = label.subject if subject is not None: # label.subject can be None, depending on init if subject != label.subject: raise ValueError('Got multiple values of %s: %s and %s' % (name, subject, label.subject)) if subject is None: raise ValueError('if label.subject is None for all labels, ' '%s must be provided' % name) return subject @verbose def morph_labels(labels, subject_to, subject_from=None, subjects_dir=None, surf_name='white', verbose=None): """Morph a set of labels. This is useful when morphing a set of non-overlapping labels (such as those obtained with :func:`read_labels_from_annot`) from one subject to another. Parameters ---------- labels : list The labels to morph. 
    subject_to : str
        The subject to morph labels to.
    subject_from : str | None
        The subject to morph labels from. Can be None if the labels
        have the ``.subject`` property defined.
    %(subjects_dir)s
    surf_name : str
        Surface used to obtain vertex locations, e.g., 'white', 'pial'.
    %(verbose)s

    Returns
    -------
    labels : list
        The morphed labels.

    See Also
    --------
    read_labels_from_annot
    mne.Label.morph

    Notes
    -----
    This does not use the same algorithm as Freesurfer, so the results
    morphing (e.g., from ``'fsaverage'`` to your subject) might not match
    what Freesurfer produces during ``recon-all``.

    .. versionadded:: 0.18
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    subject_from = _check_labels_subject(labels, subject_from, 'subject_from')
    # one morph map per hemisphere, each sized (n_vert_to, n_vert_from);
    # mmap.shape[0] is used below to validate the subject_to surface
    mmaps = read_morph_map(subject_from, subject_to, subjects_dir)
    vert_poss = [_load_vert_pos(subject_to, subjects_dir, surf_name, hemi,
                                mmap.shape[0])
                 for hemi, mmap in zip(('lh', 'rh'), mmaps)]
    # per-row argmax assigns each subject_to vertex its strongest-weighted
    # subject_from vertex (effectively a nearest-neighbor morph)
    idxs = [mmap.argmax(axis=1) for mmap in mmaps]
    out_labels = list()
    # values/filename are deliberately not propagated from the input labels
    values = filename = None
    for label in labels:
        li = dict(lh=0, rh=1)[label.hemi]
        # keep every subject_to vertex whose mapped source vertex is in label
        vertices = np.where(np.in1d(idxs[li], label.vertices))[0]
        pos = vert_poss[li][vertices]
        out_labels.append(
            Label(vertices, pos, values, label.hemi, label.comment,
                  label.name, filename, subject_to, label.color,
                  label.verbose))
    return out_labels


@verbose
def labels_to_stc(labels, values, tmin=0, tstep=1, subject=None, src=None,
                  verbose=None):
    """Convert a set of labels and values to a STC.

    This function is meant to work like the opposite of
    `extract_label_time_course`.

    Parameters
    ----------
    %(eltc_labels)s
    values : ndarray, shape (n_labels, ...)
        The values in each label. Can be 1D or 2D.
    tmin : float
        The tmin to use for the STC.
    tstep : float
        The tstep to use for the STC.
    subject : str | None
        The subject for which to create the STC.
    %(eltc_src)s
        Can be omitted if using a surface source space, in which case
        the label vertices will determine the output STC vertices.
Required if using a volumetric source space. .. versionadded:: 0.22 %(verbose)s Returns ------- stc : instance of SourceEstimate | instance of VolSourceEstimate The values-in-labels converted to a STC. See Also -------- extract_label_time_course Notes ----- Vertices that appear in more than one label will be averaged. .. versionadded:: 0.18 """ values = np.array(values, float) if values.ndim == 1: values = values[:, np.newaxis] if values.ndim != 2: raise ValueError('values must have 1 or 2 dimensions, got %s' % (values.ndim,)) _validate_type(src, (SourceSpaces, None)) if src is None: data, vertices, subject = _labels_to_stc_surf( labels, values, tmin, tstep, subject) klass = SourceEstimate else: kind = src.kind subject = _check_subject( src._subject, subject, first_kind='source space subject', raise_error=False) _check_option('source space kind', kind, ('surface', 'volume')) if kind == 'volume': klass = VolSourceEstimate else: klass = SourceEstimate # Easiest way is to get a dot-able operator and use it vertices = [s['vertno'].copy() for s in src] stc = klass( np.eye(sum(len(v) for v in vertices)), vertices, 0, 1, subject) label_op = extract_label_time_course( stc, labels, src=src, mode='mean', allow_empty=True) _check_values_labels(values, label_op.shape[0]) rev_op = np.zeros(label_op.shape[::-1]) rev_op[np.arange(label_op.shape[1]), np.argmax(label_op, axis=0)] = 1. 
data = rev_op @ values return klass(data, vertices, tmin, tstep, subject, verbose) def _check_values_labels(values, n_labels): if n_labels != len(values): raise ValueError( f'values.shape[0] ({values.shape[0]}) must match the number of ' f'labels ({n_labels})') def _labels_to_stc_surf(labels, values, tmin, tstep, subject): from scipy import sparse subject = _check_labels_subject(labels, subject, 'subject') _check_values_labels(values, len(labels)) vertices = dict(lh=[], rh=[]) data = dict(lh=[], rh=[]) for li, label in enumerate(labels): data[label.hemi].append( np.repeat(values[li][np.newaxis], len(label.vertices), axis=0)) vertices[label.hemi].append(label.vertices) hemis = ('lh', 'rh') for hemi in hemis: vertices[hemi] = np.concatenate(vertices[hemi], axis=0) data[hemi] = np.concatenate(data[hemi], axis=0).astype(float) cols = np.arange(len(vertices[hemi])) vertices[hemi], rows = np.unique(vertices[hemi], return_inverse=True) mat = sparse.coo_matrix((np.ones(len(rows)), (rows, cols))).tocsr() mat = mat * sparse.diags(1. 
/ np.asarray(mat.sum(axis=-1))[:, 0]) data[hemi] = mat.dot(data[hemi]) vertices = [vertices[hemi] for hemi in hemis] data = np.concatenate([data[hemi] for hemi in hemis], axis=0) return data, vertices, subject _DEFAULT_TABLE_NAME = 'MNE-Python Colortable' def _write_annot(fname, annot, ctab, names, table_name=_DEFAULT_TABLE_NAME): """Write a Freesurfer annotation to a .annot file.""" assert len(names) == len(ctab) with open(fname, 'wb') as fid: n_verts = len(annot) np.array(n_verts, dtype='>i4').tofile(fid) data = np.zeros((n_verts, 2), dtype='>i4') data[:, 0] = np.arange(n_verts) data[:, 1] = annot data.ravel().tofile(fid) # indicate that color table exists np.array(1, dtype='>i4').tofile(fid) # color table version 2 np.array(-2, dtype='>i4').tofile(fid) # write color table n_entries = len(ctab) np.array(n_entries, dtype='>i4').tofile(fid) # write our color table name _write_annot_str(fid, table_name) # number of entries to write np.array(n_entries, dtype='>i4').tofile(fid) # write entries for ii, (name, color) in enumerate(zip(names, ctab)): np.array(ii, dtype='>i4').tofile(fid) _write_annot_str(fid, name) np.array(color[:4], dtype='>i4').tofile(fid) def _write_annot_str(fid, s): s = s.encode('ascii') + b'\x00' np.array(len(s), '>i4').tofile(fid) fid.write(s) @verbose def write_labels_to_annot(labels, subject=None, parc=None, overwrite=False, subjects_dir=None, annot_fname=None, colormap='hsv', hemi='both', sort=True, table_name=_DEFAULT_TABLE_NAME, verbose=None): r"""Create a FreeSurfer annotation from a list of labels. Parameters ---------- labels : list with instances of mne.Label The labels to create a parcellation from. subject : str | None The subject for which to write the parcellation. parc : str | None The parcellation name to use. overwrite : bool Overwrite files if they already exist. %(subjects_dir)s annot_fname : str | None Filename of the .annot file. If not None, only this file is written and 'parc' and 'subject' are ignored. 
colormap : str Colormap to use to generate label colors for labels that do not have a color specified. hemi : 'both' | 'lh' | 'rh' The hemisphere(s) for which to write \*.annot files (only applies if annot_fname is not specified; default is 'both'). sort : bool If True (default), labels will be sorted by name before writing. .. versionadded:: 0.21.0 table_name : str The table name to use for the colortable. .. versionadded:: 0.21.0 %(verbose)s See Also -------- read_labels_from_annot Notes ----- Vertices that are not covered by any of the labels are assigned to a label named "unknown". """ logger.info('Writing labels to parcellation...') subjects_dir = get_subjects_dir(subjects_dir) # get the .annot filenames and hemispheres annot_fname, hemis = _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir) if not overwrite: for fname in annot_fname: if op.exists(fname): raise ValueError('File %s exists. Use "overwrite=True" to ' 'overwrite it' % fname) # prepare container for data to save: to_save = [] # keep track of issues found in the labels duplicate_colors = [] invalid_colors = [] overlap = [] no_color = (-1, -1, -1, -1) no_color_rgb = (-1, -1, -1) for hemi, fname in zip(hemis, annot_fname): hemi_labels = [label for label in labels if label.hemi == hemi] n_hemi_labels = len(hemi_labels) if n_hemi_labels == 0: ctab = np.empty((0, 4), dtype=np.int32) ctab_rgb = ctab[:, :3] else: if sort: hemi_labels.sort(key=lambda label: label.name) # convert colors to 0-255 RGBA tuples hemi_colors = [no_color if label.color is None else tuple(int(round(255 * i)) for i in label.color) for label in hemi_labels] ctab = np.array(hemi_colors, dtype=np.int32) ctab_rgb = ctab[:, :3] # make color dict (for annot ID, only R, G and B count) labels_by_color = defaultdict(list) for label, color in zip(hemi_labels, ctab_rgb): labels_by_color[tuple(color)].append(label.name) # check label colors for color, names in labels_by_color.items(): if color == no_color_rgb: continue if color == 
(0, 0, 0): # we cannot have an all-zero color, otherw. e.g. tksurfer # refuses to read the parcellation warn('At least one label contains a color with, "r=0, ' 'g=0, b=0" value. Some FreeSurfer tools may fail ' 'to read the parcellation') if any(i > 255 for i in color): msg = ("%s: %s (%s)" % (color, ', '.join(names), hemi)) invalid_colors.append(msg) if len(names) > 1: msg = "%s: %s (%s)" % (color, ', '.join(names), hemi) duplicate_colors.append(msg) # replace None values (labels with unspecified color) if labels_by_color[no_color_rgb]: default_colors = _n_colors(n_hemi_labels, bytes_=True, cmap=colormap) # keep track of colors known to be in hemi_colors : safe_color_i = 0 for i in range(n_hemi_labels): if ctab[i, 0] == -1: color = default_colors[i] # make sure to add no duplicate color while np.any(np.all(color[:3] == ctab_rgb, 1)): color = default_colors[safe_color_i] safe_color_i += 1 # assign the color ctab[i] = color # find number of vertices in surface if subject is not None and subjects_dir is not None: fpath = op.join(subjects_dir, subject, 'surf', '%s.white' % hemi) points, _ = read_surface(fpath) n_vertices = len(points) else: if len(hemi_labels) > 0: max_vert = max(np.max(label.vertices) for label in hemi_labels) n_vertices = max_vert + 1 else: n_vertices = 1 warn('Number of vertices in the surface could not be ' 'verified because the surface file could not be found; ' 'specify subject and subjects_dir parameters.') # Create annot and color table array to write annot = np.empty(n_vertices, dtype=np.int64) annot[:] = -1 # create the annotation ids from the colors annot_id_coding = np.array((1, 2 ** 8, 2 ** 16)) annot_ids = list(np.sum(ctab_rgb * annot_id_coding, axis=1)) for label, annot_id in zip(hemi_labels, annot_ids): # make sure the label is not overwriting another label if np.any(annot[label.vertices] != -1): other_ids = set(annot[label.vertices]) other_ids.discard(-1) other_indices = (annot_ids.index(i) for i in other_ids) other_names = 
(hemi_labels[i].name for i in other_indices) other_repr = ', '.join(other_names) msg = "%s: %s overlaps %s" % (hemi, label.name, other_repr) overlap.append(msg) annot[label.vertices] = annot_id hemi_names = [label.name for label in hemi_labels] if None in hemi_names: msg = ("Found %i labels with no name. Writing annotation file" "requires all labels named" % (hemi_names.count(None))) # raise the error immediately rather than crash with an # uninformative error later (e.g. cannot join NoneType) raise ValueError(msg) # Assign unlabeled vertices to an "unknown" label unlabeled = (annot == -1) if np.any(unlabeled): msg = ("Assigning %i unlabeled vertices to " "'unknown-%s'" % (unlabeled.sum(), hemi)) logger.info(msg) # find an unused color (try shades of gray first) for i in range(1, 257): if not np.any(np.all((i, i, i) == ctab_rgb, 1)): break if i < 256: color = (i, i, i, 0) else: err = ("Need one free shade of gray for 'unknown' label. " "Please modify your label colors, or assign the " "unlabeled vertices to another label.") raise ValueError(err) # find the id annot_id = np.sum(annot_id_coding * color[:3]) # update data to write annot[unlabeled] = annot_id ctab = np.vstack((ctab, color)) hemi_names.append("unknown") # convert to FreeSurfer alpha values ctab[:, 3] = 255 - ctab[:, 3] # remove hemi ending in names hemi_names = [name[:-3] if name.endswith(hemi) else name for name in hemi_names] to_save.append((fname, annot, ctab, hemi_names)) issues = [] if duplicate_colors: msg = ("Some labels have the same color values (all labels in one " "hemisphere must have a unique color):") duplicate_colors.insert(0, msg) issues.append('\n'.join(duplicate_colors)) if invalid_colors: msg = ("Some labels have invalid color values (all colors should be " "RGBA tuples with values between 0 and 1)") invalid_colors.insert(0, msg) issues.append('\n'.join(invalid_colors)) if overlap: msg = ("Some labels occupy vertices that are also occupied by one or " "more other labels. 
Each vertex can only be occupied by a " "single label in *.annot files.") overlap.insert(0, msg) issues.append('\n'.join(overlap)) if issues: raise ValueError('\n\n'.join(issues)) # write it for fname, annot, ctab, hemi_names in to_save: logger.info(' writing %d labels to %s' % (len(hemi_names), fname)) _write_annot(fname, annot, ctab, hemi_names, table_name) @fill_doc def select_sources(subject, label, location='center', extent=0., grow_outside=True, subjects_dir=None, name=None, random_state=None, surf='white'): """Select sources from a label. Parameters ---------- %(subject)s label : instance of Label | str Define where the seed will be chosen. If str, can be 'lh' or 'rh', which correspond to left or right hemisphere, respectively. location : 'random' | 'center' | int Location to grow label from. If the location is an int, it represents the vertex number in the corresponding label. If it is a str, it can be either 'random' or 'center'. extent : float Extents (radius in mm) of the labels, i.e. maximum geodesic distance on the white matter surface from the seed. If 0, the resulting label will contain only one vertex. grow_outside : bool Let the region grow outside the original label where location was defined. %(subjects_dir)s name : None | str Assign name to the new label. %(random_state)s surf : str The surface used to simulated the label, defaults to the white surface. Returns ------- label : instance of Label The label that contains the selected sources. Notes ----- This function selects a region of interest on the cortical surface based on a label (or a hemisphere). The sources are selected by growing a region around a seed which is selected randomly, is the center of the label, or is a specific vertex. The selected vertices can extend beyond the initial provided label. This can be prevented by setting grow_outside to False. The selected sources are returned in the form of a new Label object. 
The values of the label contain the distance from the seed in
    millimeters.

    .. versionadded:: 0.18
    """
    # If label is a string, convert it to a label that contains the whole
    # hemisphere.
    if isinstance(label, str):
        _check_option('label', label, ['lh', 'rh'])
        surf_filename = op.join(subjects_dir, subject, 'surf',
                                label + '.white')
        vertices, _ = read_surface(surf_filename)
        indices = np.arange(len(vertices), dtype=int)
        label = Label(indices, vertices, hemi=label)

    # Choose the seed according to the selected strategy.
    if isinstance(location, str):
        _check_option('location', location, ['center', 'random'])

        if location == 'center':
            seed = label.center_of_mass(
                subject, restrict_vertices=True, subjects_dir=subjects_dir,
                surf=surf)
        else:
            # 'random': draw one vertex of the label with the provided RNG
            rng = check_random_state(random_state)
            seed = rng.choice(label.vertices)
    else:
        # integer location: index into the label's own vertex array
        seed = label.vertices[location]

    hemi = 0 if label.hemi == 'lh' else 1
    new_label = grow_labels(subject, seed, extent, hemi, subjects_dir)[0]

    # We override the name because grow_label automatically adds a -rh or -lh
    # to the given parameter.
    new_label.name = name

    # Restrict the new label to the vertices of the input label if needed.
    if not grow_outside:
        to_keep = np.array([v in label.vertices for v in new_label.vertices])
        new_label = Label(new_label.vertices[to_keep], new_label.pos[to_keep],
                          hemi=new_label.hemi, name=name, subject=subject)

    return new_label


def find_pos_in_annot(pos, subject='fsaverage', annot='aparc+aseg',
                      subjects_dir=None):
    """
    Find name in atlas for given MRI coordinates.

    Parameters
    ----------
    pos : ndarray, shape (3,)
        Vector of x,y,z coordinates in MRI space.
    subject : str
        MRI subject name.
    annot : str
        MRI volumetric atlas file name. Do not include the ``.mgz`` suffix.
    subjects_dir : path-like
        Path to MRI subjects directory.

    Returns
    -------
    label : str
        Anatomical region name from atlas.

    Notes
    -----
    .. versionadded:: 0.24
    """
    # coerce to a float (3,) vector; reject anything of a different shape
    pos = np.asarray(pos, float)
    if pos.shape != (3,):
        raise ValueError(
            'pos must be an array of shape (3,), '
            f'got {pos.shape}')
    nibabel = _import_nibabel('read MRI parcellations')
    if subjects_dir is None:
        subjects_dir = get_subjects_dir(None)
    atlas_fname = os.path.join(subjects_dir, subject, 'mri', annot + '.mgz')
    parcellation_img = nibabel.load(atlas_fname)

    # Load freesurface atlas LUT
    lut_inv_dict = read_freesurfer_lut()[0]
    # invert name->id into id->name for reverse lookup below
    label_lut = {v: k for k, v in lut_inv_dict.items()}

    # Find voxel for dipole position
    # invert vox->RAS(tkr) to map the MRI-space position into voxel indices,
    # then round to the nearest voxel
    mri_vox_t = np.linalg.inv(parcellation_img.header.get_vox2ras_tkr())
    vox_dip_pos_f = apply_trans(mri_vox_t, pos)
    vox_dip_pos = np.rint(vox_dip_pos_f).astype(int)

    # Get voxel value and label from LUT
    # NOTE(review): get_fdata() yields a float voxel value while the LUT is
    # presumably keyed by ints; the lookup relies on hash(2.0) == hash(2) in
    # Python — confirm, an explicit int() cast would be clearer.
    vol_values = parcellation_img.get_fdata()[tuple(vox_dip_pos.T)]
    # fall back to 'Unknown' for voxel values absent from the LUT
    label = label_lut.get(vol_values, 'Unknown')

    return label
import os.path as op
import gc

import pytest
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_equal,
                           assert_array_equal, assert_allclose)

from mne.datasets import testing
from mne import (read_forward_solution, apply_forward, apply_forward_raw,
                 average_forward_solutions, write_forward_solution,
                 convert_forward_solution, SourceEstimate, pick_types_forward,
                 read_evokeds, VectorSourceEstimate)
from mne.io import read_info
from mne.label import read_label
from mne.utils import requires_mne, run_subprocess
from mne.forward import (restrict_forward_to_stc, restrict_forward_to_label,
                         Forward, is_fixed_orient, compute_orient_prior,
                         compute_depth_prior)
from mne.channels import equalize_channels

data_path = testing.data_path(download=False)
fname_meeg = op.join(data_path, 'MEG', 'sample',
                     'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
fname_meeg_grad = op.join(data_path, 'MEG', 'sample',
                          'sample_audvis_trunc-meg-eeg-oct-2-grad-fwd.fif')

fname_evoked = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
                       'data', 'test-ave.fif')


def assert_forward_allclose(f1, f2, rtol=1e-7):
    """Compare two potentially converted forward solutions.

    Parameters
    ----------
    f1, f2 : Forward
        The forward solutions (dict-like) to compare. Checks the gain
        matrix, its column count, the source normals, the (optional)
        gradient solution, orientation flags, and the source-space
        coordinate frame.
    rtol : float
        Relative tolerance passed to ``assert_allclose`` for the
        floating-point comparisons.
    """
    assert_allclose(f1['sol']['data'], f2['sol']['data'], rtol=rtol)
    assert f1['sol']['ncol'] == f2['sol']['ncol']
    assert f1['sol']['ncol'] == f1['sol']['data'].shape[1]
    assert_allclose(f1['source_nn'], f2['source_nn'], rtol=rtol)
    if f1['sol_grad'] is not None:
        # either both or neither forward may carry a gradient solution
        assert (f2['sol_grad'] is not None)
        assert_allclose(f1['sol_grad']['data'], f2['sol_grad']['data'])
        assert f1['sol_grad']['ncol'] == f2['sol_grad']['ncol']
        assert f1['sol_grad']['ncol'] == f1['sol_grad']['data'].shape[1]
    else:
        assert (f2['sol_grad'] is None)
    assert f1['source_ori'] == f2['source_ori']
    assert f1['surf_ori'] == f2['surf_ori']
    # bugfix: previously compared f1 against itself, so the coord_frame
    # check could never fail; compare f1 against f2 as intended
    assert f1['src'][0]['coord_frame'] == f2['src'][0]['coord_frame']


@testing.requires_testing_data
def test_convert_forward():
    """Test converting forward solution between different representations."""
    fwd = read_forward_solution(fname_meeg_grad)
    fwd_repr = repr(fwd)
    assert ('306' in fwd_repr)
    assert ('60' in fwd_repr)
    assert (fwd_repr)
    assert (isinstance(fwd, Forward))
    # look at surface orientation
    fwd_surf = convert_forward_solution(fwd, surf_ori=True)
    # go back
    fwd_new = convert_forward_solution(fwd_surf, surf_ori=False)
    assert (repr(fwd_new))
    assert (isinstance(fwd_new, Forward))
    assert_forward_allclose(fwd, fwd_new)
    del fwd_new
    gc.collect()
    # now go to fixed
    fwd_fixed = convert_forward_solution(fwd_surf, surf_ori=True,
                                         force_fixed=True, use_cps=False)
    del fwd_surf
    gc.collect()
    assert (repr(fwd_fixed))
    assert (isinstance(fwd_fixed, Forward))
    assert (is_fixed_orient(fwd_fixed))
    # now go back to cartesian (original condition)
    fwd_new = convert_forward_solution(fwd_fixed, surf_ori=False,
                                       force_fixed=False)
    assert (repr(fwd_new))
    assert (isinstance(fwd_new, Forward))
    assert_forward_allclose(fwd, fwd_new)
    del fwd, fwd_new, fwd_fixed
    gc.collect()


@pytest.mark.slowtest
@testing.requires_testing_data
def test_io_forward(tmpdir):
    """Test IO for forward solutions."""
    # do extensive tests with MEEG + grad
    n_channels, n_src = 366, 108
    fwd = read_forward_solution(fname_meeg_grad)
    assert (isinstance(fwd, Forward))
    fwd = read_forward_solution(fname_meeg_grad)
    fwd = convert_forward_solution(fwd, surf_ori=True)
    leadfield = fwd['sol']['data']
    assert_equal(leadfield.shape, (n_channels, n_src))
    assert_equal(len(fwd['sol']['row_names']), n_channels)

    fname_temp = tmpdir.join('test-fwd.fif')
    with pytest.warns(RuntimeWarning, match='stored on disk'):
        write_forward_solution(fname_temp, fwd, overwrite=True)

    fwd = read_forward_solution(fname_meeg_grad)
    fwd = convert_forward_solution(fwd, surf_ori=True)
    fwd_read = read_forward_solution(fname_temp)
    fwd_read = convert_forward_solution(fwd_read, surf_ori=True)
    leadfield = fwd_read['sol']['data']
    assert_equal(leadfield.shape, (n_channels, n_src))
    assert_equal(len(fwd_read['sol']['row_names']), n_channels)
assert_equal(len(fwd_read['info']['chs']), n_channels) assert ('dev_head_t' in fwd_read['info']) assert ('mri_head_t' in fwd_read) assert_array_almost_equal(fwd['sol']['data'], fwd_read['sol']['data']) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=False) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=False) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, 1494 / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) fwd = read_forward_solution(fname_meeg_grad) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) leadfield = fwd['sol']['data'] assert_equal(leadfield.shape, (n_channels, n_src / 3)) assert_equal(len(fwd['sol']['row_names']), n_channels) assert_equal(len(fwd['info']['chs']), n_channels) assert ('dev_head_t' in fwd['info']) assert ('mri_head_t' in fwd) assert (fwd['surf_ori']) with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_temp, fwd, overwrite=True) 
fwd_read = read_forward_solution(fname_temp) fwd_read = convert_forward_solution(fwd_read, surf_ori=True, force_fixed=True, use_cps=True) assert (repr(fwd_read)) assert (isinstance(fwd_read, Forward)) assert (is_fixed_orient(fwd_read)) assert_forward_allclose(fwd, fwd_read) # test warnings on bad filenames fwd = read_forward_solution(fname_meeg_grad) fwd_badname = tmpdir.join('test-bad-name.fif.gz') with pytest.warns(RuntimeWarning, match='end with'): write_forward_solution(fwd_badname, fwd) with pytest.warns(RuntimeWarning, match='end with'): read_forward_solution(fwd_badname) fwd = read_forward_solution(fname_meeg) write_forward_solution(fname_temp, fwd, overwrite=True) fwd_read = read_forward_solution(fname_temp) assert_forward_allclose(fwd, fwd_read) @testing.requires_testing_data def test_apply_forward(): """Test projection of source space data to sensor space.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) assert isinstance(fwd, Forward) vertno = [fwd['src'][0]['vertno'], fwd['src'][1]['vertno']] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) gain_sum = np.sum(fwd['sol']['data'], axis=1) # Evoked evoked = read_evokeds(fname_evoked, condition=0) evoked.pick_types(meg=True) with pytest.warns(RuntimeWarning, match='only .* positive values'): evoked = apply_forward(fwd, stc, evoked.info, start=start, stop=stop) data = evoked.data times = evoked.times # do some tests assert_array_almost_equal(evoked.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) assert_array_almost_equal(times[0], t_start) assert_array_almost_equal(times[-1], t_start + (n_times - 1) / sfreq) # vector stc_vec = VectorSourceEstimate( fwd['source_nn'][:, :, np.newaxis] * 
stc.data[:, np.newaxis], stc.vertices, stc.tmin, stc.tstep) with pytest.warns(RuntimeWarning, match='very large'): evoked_2 = apply_forward(fwd, stc_vec, evoked.info) assert np.abs(evoked_2.data).mean() > 1e-5 assert_allclose(evoked.data, evoked_2.data, atol=1e-10) # Raw with pytest.warns(RuntimeWarning, match='only .* positive values'): raw_proj = apply_forward_raw(fwd, stc, evoked.info, start=start, stop=stop) data, times = raw_proj[:, :] # do some tests assert_array_almost_equal(raw_proj.info['sfreq'], sfreq) assert_array_almost_equal(np.sum(data, axis=1), n_times * gain_sum) atol = 1. / sfreq assert_allclose(raw_proj.first_samp / sfreq, t_start, atol=atol) assert_allclose(raw_proj.last_samp / sfreq, t_start + (n_times - 1) / sfreq, atol=atol) @testing.requires_testing_data def test_restrict_forward_to_stc(tmpdir): """Test restriction of source space to source SourceEstimate.""" start = 0 stop = 5 n_times = stop - start - 1 sfreq = 10.0 t_start = 0.123 fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert (isinstance(fwd_out, Forward)) assert_equal(fwd_out['sol']['ncol'], 20) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=False) fwd = pick_types_forward(fwd, meg=True) vertno = [fwd['src'][0]['vertno'][0:15], fwd['src'][1]['vertno'][0:5]] stc_data = np.ones((len(vertno[0]) + len(vertno[1]), n_times)) stc = 
SourceEstimate(stc_data, vertno, tmin=t_start, tstep=1.0 / sfreq) fwd_out = restrict_forward_to_stc(fwd, stc) assert_equal(fwd_out['sol']['ncol'], 60) assert_equal(fwd_out['src'][0]['nuse'], 15) assert_equal(fwd_out['src'][1]['nuse'], 5) assert_equal(fwd_out['src'][0]['vertno'], fwd['src'][0]['vertno'][0:15]) assert_equal(fwd_out['src'][1]['vertno'], fwd['src'][1]['vertno'][0:5]) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. fname_copy = tmpdir.join('copy-fwd.fif') with pytest.warns(RuntimeWarning, match='stored on disk'): write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) fwd_out_read = convert_forward_solution(fwd_out_read, surf_ori=True, force_fixed=False) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data def test_restrict_forward_to_label(tmpdir): """Test restriction of source space to label.""" fwd = read_forward_solution(fname_meeg) fwd = convert_forward_solution(fwd, surf_ori=True, force_fixed=True, use_cps=True) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], len(src_sel_lh) + len(src_sel_rh)) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], 
len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) fwd = read_forward_solution(fname_meeg) fwd = pick_types_forward(fwd, meg=True) label_path = op.join(data_path, 'MEG', 'sample', 'labels') labels = ['Aud-lh', 'Vis-rh'] label_lh = read_label(op.join(label_path, labels[0] + '.label')) label_rh = read_label(op.join(label_path, labels[1] + '.label')) fwd_out = restrict_forward_to_label(fwd, [label_lh, label_rh]) src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label_lh.vertices) src_sel_lh = np.searchsorted(fwd['src'][0]['vertno'], src_sel_lh) vertno_lh = fwd['src'][0]['vertno'][src_sel_lh] nuse_lh = fwd['src'][0]['nuse'] src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label_rh.vertices) src_sel_rh = np.searchsorted(fwd['src'][1]['vertno'], src_sel_rh) vertno_rh = fwd['src'][1]['vertno'][src_sel_rh] src_sel_rh += nuse_lh assert_equal(fwd_out['sol']['ncol'], 3 * (len(src_sel_lh) + len(src_sel_rh))) assert_equal(fwd_out['src'][0]['nuse'], len(src_sel_lh)) assert_equal(fwd_out['src'][1]['nuse'], len(src_sel_rh)) assert_equal(fwd_out['src'][0]['vertno'], vertno_lh) assert_equal(fwd_out['src'][1]['vertno'], vertno_rh) # Test saving the restricted forward object. This only works if all fields # are properly accounted for. 
fname_copy = tmpdir.join('copy-fwd.fif') write_forward_solution(fname_copy, fwd_out, overwrite=True) fwd_out_read = read_forward_solution(fname_copy) assert_forward_allclose(fwd_out, fwd_out_read) @testing.requires_testing_data @requires_mne def test_average_forward_solution(tmpdir): """Test averaging forward solutions.""" fwd = read_forward_solution(fname_meeg) # input not a list pytest.raises(TypeError, average_forward_solutions, 1) # list is too short pytest.raises(ValueError, average_forward_solutions, []) # negative weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [-1, 0]) # all zero weights pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0]) # weights not same length pytest.raises(ValueError, average_forward_solutions, [fwd, fwd], [0, 0, 0]) # list does not only have all dict() pytest.raises(TypeError, average_forward_solutions, [1, fwd]) # try an easy case fwd_copy = average_forward_solutions([fwd]) assert (isinstance(fwd_copy, Forward)) assert_array_equal(fwd['sol']['data'], fwd_copy['sol']['data']) # modify a fwd solution, save it, use MNE to average with old one fwd_copy['sol']['data'] *= 0.5 fname_copy = str(tmpdir.join('copy-fwd.fif')) write_forward_solution(fname_copy, fwd_copy, overwrite=True) cmd = ('mne_average_forward_solutions', '--fwd', fname_meeg, '--fwd', fname_copy, '--out', fname_copy) run_subprocess(cmd) # now let's actually do it, with one filename and one fwd fwd_ave = average_forward_solutions([fwd, fwd_copy]) assert_array_equal(0.75 * fwd['sol']['data'], fwd_ave['sol']['data']) # fwd_ave_mne = read_forward_solution(fname_copy) # assert_array_equal(fwd_ave_mne['sol']['data'], fwd_ave['sol']['data']) # with gradient fwd = read_forward_solution(fname_meeg_grad) fwd_ave = average_forward_solutions([fwd, fwd]) assert_forward_allclose(fwd, fwd_ave) @testing.requires_testing_data def test_priors(): """Test prior computations.""" # Depth prior fwd = read_forward_solution(fname_meeg) assert not 
is_fixed_orient(fwd) n_sources = fwd['nsource'] info = read_info(fname_evoked) depth_prior = compute_depth_prior(fwd, info, exp=0.8) assert depth_prior.shape == (3 * n_sources,) depth_prior = compute_depth_prior(fwd, info, exp=0.) assert_array_equal(depth_prior, 1.) with pytest.raises(ValueError, match='must be "whiten"'): compute_depth_prior(fwd, info, limit_depth_chs='foo') with pytest.raises(ValueError, match='noise_cov must be a Covariance'): compute_depth_prior(fwd, info, limit_depth_chs='whiten') fwd_fixed = convert_forward_solution(fwd, force_fixed=True) depth_prior = compute_depth_prior(fwd_fixed, info=info) assert depth_prior.shape == (n_sources,) # Orientation prior orient_prior = compute_orient_prior(fwd, 1.) assert_array_equal(orient_prior, 1.) orient_prior = compute_orient_prior(fwd_fixed, 0.) assert_array_equal(orient_prior, 1.) with pytest.raises(ValueError, match='oriented in surface coordinates'): compute_orient_prior(fwd, 0.5) fwd_surf_ori = convert_forward_solution(fwd, surf_ori=True) orient_prior = compute_orient_prior(fwd_surf_ori, 0.5) assert all(np.in1d(orient_prior, (0.5, 1.))) with pytest.raises(ValueError, match='between 0 and 1'): compute_orient_prior(fwd_surf_ori, -0.5) with pytest.raises(ValueError, match='with fixed orientation'): compute_orient_prior(fwd_fixed, 0.5) @testing.requires_testing_data def test_equalize_channels(): """Test equalization of channels for instances of Forward.""" fwd1 = read_forward_solution(fname_meeg) fwd1.pick_channels(['EEG 001', 'EEG 002', 'EEG 003']) fwd2 = fwd1.copy().pick_channels(['EEG 002', 'EEG 001'], ordered=True) fwd1, fwd2 = equalize_channels([fwd1, fwd2]) assert fwd1.ch_names == ['EEG 001', 'EEG 002'] assert fwd2.ch_names == ['EEG 001', 'EEG 002']
rkmaddox/mne-python
mne/forward/tests/test_forward.py
mne/label.py
"""Common IO api utilities""" import codecs from contextlib import closing, contextmanager import csv import mmap import os import zipfile import pandas.compat as compat from pandas.compat import BytesIO, StringIO, string_types, text_type from pandas.errors import ( # noqa DtypeWarning, EmptyDataError, ParserError, ParserWarning) from pandas.core.dtypes.common import is_file_like, is_number import pandas.core.common as com from pandas.io.formats.printing import pprint_thing # gh-12665: Alias for now and remove later. CParserError = ParserError # common NA values # no longer excluding inf representations # '1.#INF','-1.#INF', '1.#INF000000', _NA_VALUES = {'-1.#IND', '1.#QNAN', '1.#IND', '-1.#QNAN', '#N/A N/A', '#N/A', 'N/A', 'n/a', 'NA', '#NA', 'NULL', 'null', 'NaN', '-NaN', 'nan', '-nan', ''} if compat.PY3: from urllib.request import urlopen, pathname2url _urlopen = urlopen from urllib.parse import urlparse as parse_url from urllib.parse import (uses_relative, uses_netloc, uses_params, urlencode, urljoin) from urllib.error import URLError from http.client import HTTPException # noqa else: from urllib2 import urlopen as _urlopen from urllib import urlencode, pathname2url # noqa from urlparse import urlparse as parse_url from urlparse import uses_relative, uses_netloc, uses_params, urljoin from urllib2 import URLError # noqa from httplib import HTTPException # noqa from contextlib import contextmanager, closing # noqa from functools import wraps # noqa # @wraps(_urlopen) @contextmanager def urlopen(*args, **kwargs): with closing(_urlopen(*args, **kwargs)) as f: yield f _VALID_URLS = set(uses_relative + uses_netloc + uses_params) _VALID_URLS.discard('') class BaseIterator(object): """Subclass this and provide a "__next__()" method to obtain an iterator. Useful only when the object being iterated is non-reusable (e.g. 
OK for a parser, not for an in-memory table, yes for its iterator).""" def __iter__(self): return self def __next__(self): raise com.AbstractMethodError(self) if not compat.PY3: BaseIterator.next = lambda self: self.__next__() def _is_url(url): """Check to see if a URL has a valid protocol. Parameters ---------- url : str or unicode Returns ------- isurl : bool If `url` has a valid protocol return True otherwise False. """ try: return parse_url(url).scheme in _VALID_URLS except Exception: return False def _expand_user(filepath_or_buffer): """Return the argument with an initial component of ~ or ~user replaced by that user's home directory. Parameters ---------- filepath_or_buffer : object to be converted if possible Returns ------- expanded_filepath_or_buffer : an expanded filepath or the input if not expandable """ if isinstance(filepath_or_buffer, string_types): return os.path.expanduser(filepath_or_buffer) return filepath_or_buffer def _validate_header_arg(header): if isinstance(header, bool): raise TypeError("Passing a bool to header is invalid. " "Use header=None for no header or " "header=int or list-like of ints to specify " "the row(s) making up the column names") def _stringify_path(filepath_or_buffer): """Attempt to convert a path-like object to a string. Parameters ---------- filepath_or_buffer : object to be converted Returns ------- str_filepath_or_buffer : maybe a string version of the object Notes ----- Objects supporting the fspath protocol (python 3.6+) are coerced according to its __fspath__ method. For backwards compatibility with older pythons, pathlib.Path and py.path objects are specially coerced. Any other object is passed through unchanged, which includes bytes, strings, buffers, or anything else that's not even path-like. 
""" try: import pathlib _PATHLIB_INSTALLED = True except ImportError: _PATHLIB_INSTALLED = False try: from py.path import local as LocalPath _PY_PATH_INSTALLED = True except ImportError: _PY_PATH_INSTALLED = False if hasattr(filepath_or_buffer, '__fspath__'): return filepath_or_buffer.__fspath__() if _PATHLIB_INSTALLED and isinstance(filepath_or_buffer, pathlib.Path): return text_type(filepath_or_buffer) if _PY_PATH_INSTALLED and isinstance(filepath_or_buffer, LocalPath): return filepath_or_buffer.strpath return filepath_or_buffer def is_s3_url(url): """Check for an s3, s3n, or s3a url""" try: return parse_url(url).scheme in ['s3', 's3n', 's3a'] except Exception: return False def is_gcs_url(url): """Check for a gcs url""" try: return parse_url(url).scheme in ['gcs', 'gs'] except Exception: return False def get_filepath_or_buffer(filepath_or_buffer, encoding=None, compression=None, mode=None): """ If the filepath_or_buffer is a url, translate and return the buffer. Otherwise passthrough. 
Parameters ---------- filepath_or_buffer : a url, filepath (str, py.path.local or pathlib.Path), or buffer encoding : the encoding to use to decode py3 bytes, default is 'utf-8' mode : str, optional Returns ------- tuple of ({a filepath_ or buffer or S3File instance}, encoding, str, compression, str, should_close, bool) """ filepath_or_buffer = _stringify_path(filepath_or_buffer) if _is_url(filepath_or_buffer): req = _urlopen(filepath_or_buffer) content_encoding = req.headers.get('Content-Encoding', None) if content_encoding == 'gzip': # Override compression based on Content-Encoding header compression = 'gzip' reader = BytesIO(req.read()) req.close() return reader, encoding, compression, True if is_s3_url(filepath_or_buffer): from pandas.io import s3 return s3.get_filepath_or_buffer(filepath_or_buffer, encoding=encoding, compression=compression, mode=mode) if is_gcs_url(filepath_or_buffer): from pandas.io import gcs return gcs.get_filepath_or_buffer(filepath_or_buffer, encoding=encoding, compression=compression, mode=mode) if isinstance(filepath_or_buffer, (compat.string_types, compat.binary_type, mmap.mmap)): return _expand_user(filepath_or_buffer), None, compression, False if not is_file_like(filepath_or_buffer): msg = "Invalid file path or buffer object type: {_type}" raise ValueError(msg.format(_type=type(filepath_or_buffer))) return filepath_or_buffer, None, compression, False def file_path_to_url(path): """ converts an absolute native path to a FILE URL. Parameters ---------- path : a path in native format Returns ------- a valid FILE URL """ return urljoin('file:', pathname2url(path)) _compression_to_extension = { 'gzip': '.gz', 'bz2': '.bz2', 'zip': '.zip', 'xz': '.xz', } def _infer_compression(filepath_or_buffer, compression): """ Get the compression method for filepath_or_buffer. If compression='infer', the inferred compression method is returned. 
Otherwise, the input compression method is returned unchanged, unless it's invalid, in which case an error is raised. Parameters ---------- filepath_or_buffer : a path (str) or buffer compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None} If 'infer' and `filepath_or_buffer` is path-like, then detect compression from the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no compression). Returns ------- string or None : compression method Raises ------ ValueError on invalid compression specified """ # No compression has been explicitly specified if compression is None: return None # Infer compression if compression == 'infer': # Convert all path types (e.g. pathlib.Path) to strings filepath_or_buffer = _stringify_path(filepath_or_buffer) if not isinstance(filepath_or_buffer, compat.string_types): # Cannot infer compression of a buffer, assume no compression return None # Infer compression from the filename/URL extension for compression, extension in _compression_to_extension.items(): if filepath_or_buffer.endswith(extension): return compression return None # Compression has been specified. Check that it's valid if compression in _compression_to_extension: return compression msg = 'Unrecognized compression type: {}'.format(compression) valid = ['infer', None] + sorted(_compression_to_extension) msg += '\nValid compression types are {}'.format(valid) raise ValueError(msg) def _get_handle(path_or_buf, mode, encoding=None, compression=None, memory_map=False, is_text=True): """ Get file handle for given path/buffer and mode. Parameters ---------- path_or_buf : a path (str) or buffer mode : str mode to open path_or_buf with encoding : str or None compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default None If 'infer' and `filepath_or_buffer` is path-like, then detect compression from the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no compression). 
memory_map : boolean, default False See parsers._parser_params for more information. is_text : boolean, default True whether file/buffer is in text format (csv, json, etc.), or in binary mode (pickle, etc.) Returns ------- f : file-like A file-like object handles : list of file-like objects A list of file-like object that were opened in this function. """ try: from s3fs import S3File need_text_wrapping = (BytesIO, S3File) except ImportError: need_text_wrapping = (BytesIO,) handles = list() f = path_or_buf # Convert pathlib.Path/py.path.local or string path_or_buf = _stringify_path(path_or_buf) is_path = isinstance(path_or_buf, compat.string_types) if is_path: compression = _infer_compression(path_or_buf, compression) if compression: if compat.PY2 and not is_path and encoding: msg = 'compression with encoding is not yet supported in Python 2' raise ValueError(msg) # GZ Compression if compression == 'gzip': import gzip if is_path: f = gzip.open(path_or_buf, mode) else: f = gzip.GzipFile(fileobj=path_or_buf) # BZ Compression elif compression == 'bz2': import bz2 if is_path: f = bz2.BZ2File(path_or_buf, mode) elif compat.PY2: # Python 2's bz2 module can't take file objects, so have to # run through decompress manually f = StringIO(bz2.decompress(path_or_buf.read())) path_or_buf.close() else: f = bz2.BZ2File(path_or_buf) # ZIP Compression elif compression == 'zip': zf = BytesZipFile(path_or_buf, mode) # Ensure the container is closed as well. handles.append(zf) if zf.mode == 'w': f = zf elif zf.mode == 'r': zip_names = zf.namelist() if len(zip_names) == 1: f = zf.open(zip_names.pop()) elif len(zip_names) == 0: raise ValueError('Zero files found in ZIP file {}' .format(path_or_buf)) else: raise ValueError('Multiple files found in ZIP file.' 
' Only one file per ZIP: {}' .format(zip_names)) # XZ Compression elif compression == 'xz': lzma = compat.import_lzma() f = lzma.LZMAFile(path_or_buf, mode) # Unrecognized Compression else: msg = 'Unrecognized compression type: {}'.format(compression) raise ValueError(msg) handles.append(f) elif is_path: if compat.PY2: # Python 2 mode = "wb" if mode == "w" else mode f = open(path_or_buf, mode) elif encoding: # Python 3 and encoding f = open(path_or_buf, mode, encoding=encoding, newline="") elif is_text: # Python 3 and no explicit encoding f = open(path_or_buf, mode, errors='replace', newline="") else: # Python 3 and binary mode f = open(path_or_buf, mode) handles.append(f) # in Python 3, convert BytesIO or fileobjects passed with an encoding if (compat.PY3 and is_text and (compression or isinstance(f, need_text_wrapping))): from io import TextIOWrapper f = TextIOWrapper(f, encoding=encoding) handles.append(f) if memory_map and hasattr(f, 'fileno'): try: g = MMapWrapper(f) f.close() f = g except Exception: # we catch any errors that may have occurred # because that is consistent with the lower-level # functionality of the C engine (pd.read_csv), so # leave the file handler as is then pass return f, handles class BytesZipFile(zipfile.ZipFile, BytesIO): """ Wrapper for standard library class ZipFile and allow the returned file-like handle to accept byte strings via `write` method. BytesIO provides attributes of file-like object and ZipFile.writestr writes bytes strings into a member of the archive. 
""" # GH 17778 def __init__(self, file, mode, compression=zipfile.ZIP_DEFLATED, **kwargs): if mode in ['wb', 'rb']: mode = mode.replace('b', '') super(BytesZipFile, self).__init__(file, mode, compression, **kwargs) def write(self, data): super(BytesZipFile, self).writestr(self.filename, data) @property def closed(self): return self.fp is None class MMapWrapper(BaseIterator): """ Wrapper for the Python's mmap class so that it can be properly read in by Python's csv.reader class. Parameters ---------- f : file object File object to be mapped onto memory. Must support the 'fileno' method or have an equivalent attribute """ def __init__(self, f): self.mmap = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) def __getattr__(self, name): return getattr(self.mmap, name) def __iter__(self): return self def __next__(self): newline = self.mmap.readline() # readline returns bytes, not str, in Python 3, # but Python's CSV reader expects str, so convert # the output to str before continuing if compat.PY3: newline = compat.bytes_to_str(newline) # mmap doesn't raise if reading past the allocated # data but instead returns an empty string, so raise # if that is returned if newline == '': raise StopIteration return newline if not compat.PY3: MMapWrapper.next = lambda self: self.__next__() class UTF8Recoder(BaseIterator): """ Iterator that reads an encoded stream and reencodes the input to UTF-8 """ def __init__(self, f, encoding): self.reader = codecs.getreader(encoding)(f) def read(self, bytes=-1): return self.reader.read(bytes).encode("utf-8") def readline(self): return self.reader.readline().encode("utf-8") def next(self): return next(self.reader).encode("utf-8") if compat.PY3: # pragma: no cover def UnicodeReader(f, dialect=csv.excel, encoding="utf-8", **kwds): # ignore encoding return csv.reader(f, dialect=dialect, **kwds) def UnicodeWriter(f, dialect=csv.excel, encoding="utf-8", **kwds): return csv.writer(f, dialect=dialect, **kwds) else: class UnicodeReader(BaseIterator): 
""" A CSV reader which will iterate over lines in the CSV file "f", which is encoded in the given encoding. On Python 3, this is replaced (below) by csv.reader, which handles unicode. """ def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds): f = UTF8Recoder(f, encoding) self.reader = csv.reader(f, dialect=dialect, **kwds) def __next__(self): row = next(self.reader) return [compat.text_type(s, "utf-8") for s in row] class UnicodeWriter(object): """ A CSV writer which will write rows to CSV file "f", which is encoded in the given encoding. """ def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds): # Redirect output to a queue self.queue = StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f self.encoder = codecs.getincrementalencoder(encoding)() self.quoting = kwds.get("quoting", None) def writerow(self, row): def _check_as_is(x): return (self.quoting == csv.QUOTE_NONNUMERIC and is_number(x)) or isinstance(x, str) row = [x if _check_as_is(x) else pprint_thing(x).encode("utf-8") for x in row] self.writer.writerow([s for s in row]) # Fetch UTF-8 output from the queue ... data = self.queue.getvalue() data = data.decode("utf-8") # ... and re-encode it into the target encoding data = self.encoder.encode(data) # write to the target stream self.stream.write(data) # empty queue self.queue.truncate(0) def writerows(self, rows): def _check_as_is(x): return (self.quoting == csv.QUOTE_NONNUMERIC and is_number(x)) or isinstance(x, str) for i, row in enumerate(rows): rows[i] = [x if _check_as_is(x) else pprint_thing(x).encode("utf-8") for x in row] self.writer.writerows([[s for s in row] for row in rows]) # Fetch UTF-8 output from the queue ... data = self.queue.getvalue() data = data.decode("utf-8") # ... and re-encode it into the target encoding data = self.encoder.encode(data) # write to the target stream self.stream.write(data) # empty queue self.queue.truncate(0)
# -*- coding: utf-8 -*-
import numpy as np
import pytest

import pandas as pd
from pandas import Index, Int64Index, NaT, Period, PeriodIndex, period_range
import pandas.util.testing as tm


class TestPeriodIndexAsType(object):
    """Casting behaviour of PeriodIndex.astype for supported and
    unsupported target dtypes."""

    @pytest.mark.parametrize('dtype', [
        float, 'timedelta64', 'timedelta64[ns]'])
    def test_astype_raises(self, dtype):
        # GH#13149, GH#13209 - no defined conversion to these dtypes
        pidx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq='D')
        with tm.assert_raises_regex(TypeError,
                                    'Cannot cast PeriodArray to dtype'):
            pidx.astype(dtype)

    def test_astype_conversion(self):
        # GH#13149, GH#13209
        pidx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq='D')

        # object cast keeps one real Period plus three NaT periods
        res = pidx.astype(object)
        exp = Index([Period('2016-05-16', freq='D')] +
                    [Period(NaT, freq='D')] * 3, dtype='object')
        tm.assert_index_equal(res, exp)

        # int64 cast exposes the ordinal representation (NaT -> iNaT)
        res = pidx.astype(np.int64)
        exp = Int64Index([16937] + [-9223372036854775808] * 3,
                         dtype=np.int64)
        tm.assert_index_equal(res, exp)

        # str cast matches element-wise stringification
        res = pidx.astype(str)
        tm.assert_index_equal(res, Index([str(x) for x in pidx]))

        yearly = period_range('1990', '2009', freq='A')
        res = yearly.astype('i8')
        tm.assert_index_equal(res, Index(yearly.asi8))
        tm.assert_numpy_array_equal(res.values, yearly.asi8)

    def test_astype_object(self):
        # empty index
        pidx = pd.PeriodIndex([], freq='M')
        exp = np.array([], dtype=object)
        tm.assert_numpy_array_equal(pidx.astype(object).values, exp)
        tm.assert_numpy_array_equal(pidx._mpl_repr(), exp)

        # monthly with a missing value
        pidx = pd.PeriodIndex(['2011-01', pd.NaT], freq='M')
        exp = np.array([pd.Period('2011-01', freq='M'), pd.NaT], dtype=object)
        tm.assert_numpy_array_equal(pidx.astype(object).values, exp)
        tm.assert_numpy_array_equal(pidx._mpl_repr(), exp)

        # daily with a missing value
        pidx = pd.PeriodIndex(['2011-01-01', pd.NaT], freq='D')
        exp = np.array([pd.Period('2011-01-01', freq='D'), pd.NaT],
                       dtype=object)
        tm.assert_numpy_array_equal(pidx.astype(object).values, exp)
        tm.assert_numpy_array_equal(pidx._mpl_repr(), exp)

    # TODO: de-duplicate this version (from test_ops) with the one above
    # (from test_period)
    def test_astype_object2(self):
        pidx = pd.period_range(start='2013-01-01', periods=4, freq='M',
                               name='idx')
        exp_values = [pd.Period('2013-01-31', freq='M'),
                      pd.Period('2013-02-28', freq='M'),
                      pd.Period('2013-03-31', freq='M'),
                      pd.Period('2013-04-30', freq='M')]
        exp = pd.Index(exp_values, dtype=object, name='idx')

        res = pidx.astype(object)
        assert isinstance(res, Index)
        assert res.dtype == object
        tm.assert_index_equal(res, exp)
        assert res.name == exp.name
        assert pidx.tolist() == exp_values

        pidx = PeriodIndex(['2013-01-01', '2013-01-02', 'NaT',
                            '2013-01-04'], freq='D', name='idx')
        exp_values = [pd.Period('2013-01-01', freq='D'),
                      pd.Period('2013-01-02', freq='D'),
                      pd.Period('NaT', freq='D'),
                      pd.Period('2013-01-04', freq='D')]
        exp = pd.Index(exp_values, dtype=object, name='idx')

        res = pidx.astype(object)
        assert isinstance(res, Index)
        assert res.dtype == object
        tm.assert_index_equal(res, exp)
        for pos in (0, 1, 3):
            assert res[pos] == exp[pos]
        assert res[2] is pd.NaT
        assert res.name == exp.name

        as_list = pidx.tolist()
        for pos in (0, 1, 3):
            assert as_list[pos] == exp_values[pos]
        assert as_list[2] is pd.NaT
harisbal/pandas
pandas/tests/indexes/period/test_astype.py
pandas/io/common.py
import warnings # TODO: Remove after 0.23.x warnings.warn("'pandas.core' is private. Use 'pandas.Categorical'", FutureWarning, stacklevel=2) from pandas.core.arrays import Categorical # noqa from pandas.core.dtypes.dtypes import CategoricalDtype # noqa
# -*- coding: utf-8 -*-
import numpy as np
import pytest

import pandas as pd
from pandas import Index, Int64Index, NaT, Period, PeriodIndex, period_range
import pandas.util.testing as tm


class TestPeriodIndexAsType(object):
    """Tests for PeriodIndex.astype casts (object, int64, str) and for
    dtypes that must raise."""

    @pytest.mark.parametrize('dtype', [
        float, 'timedelta64', 'timedelta64[ns]'])
    def test_astype_raises(self, dtype):
        # GH#13149, GH#13209 - these targets have no defined conversion
        idx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq='D')
        msg = 'Cannot cast PeriodArray to dtype'
        with tm.assert_raises_regex(TypeError, msg):
            idx.astype(dtype)

    def test_astype_conversion(self):
        # GH#13149, GH#13209
        idx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq='D')

        # object cast: one real Period followed by three NaT periods
        result = idx.astype(object)
        expected = Index([Period('2016-05-16', freq='D')] +
                         [Period(NaT, freq='D')] * 3, dtype='object')
        tm.assert_index_equal(result, expected)

        # int64 cast exposes ordinals; NaT maps to iNaT
        result = idx.astype(np.int64)
        expected = Int64Index([16937] + [-9223372036854775808] * 3,
                              dtype=np.int64)
        tm.assert_index_equal(result, expected)

        # str cast matches per-element str()
        result = idx.astype(str)
        expected = Index(str(x) for x in idx)
        tm.assert_index_equal(result, expected)

        idx = period_range('1990', '2009', freq='A')
        result = idx.astype('i8')
        tm.assert_index_equal(result, Index(idx.asi8))
        tm.assert_numpy_array_equal(result.values, idx.asi8)

    def test_astype_object(self):
        # empty index
        idx = pd.PeriodIndex([], freq='M')
        exp = np.array([], dtype=object)
        tm.assert_numpy_array_equal(idx.astype(object).values, exp)
        tm.assert_numpy_array_equal(idx._mpl_repr(), exp)

        # monthly index containing NaT
        idx = pd.PeriodIndex(['2011-01', pd.NaT], freq='M')
        exp = np.array([pd.Period('2011-01', freq='M'), pd.NaT], dtype=object)
        tm.assert_numpy_array_equal(idx.astype(object).values, exp)
        tm.assert_numpy_array_equal(idx._mpl_repr(), exp)

        # daily index containing NaT
        exp = np.array([pd.Period('2011-01-01', freq='D'), pd.NaT],
                       dtype=object)
        idx = pd.PeriodIndex(['2011-01-01', pd.NaT], freq='D')
        tm.assert_numpy_array_equal(idx.astype(object).values, exp)
        tm.assert_numpy_array_equal(idx._mpl_repr(), exp)

    # TODO: de-duplicate this version (from test_ops) with the one above
    # (from test_period)
    def test_astype_object2(self):
        idx = pd.period_range(start='2013-01-01', periods=4, freq='M',
                              name='idx')
        expected_list = [pd.Period('2013-01-31', freq='M'),
                         pd.Period('2013-02-28', freq='M'),
                         pd.Period('2013-03-31', freq='M'),
                         pd.Period('2013-04-30', freq='M')]
        expected = pd.Index(expected_list, dtype=object, name='idx')
        result = idx.astype(object)
        assert isinstance(result, Index)
        assert result.dtype == object
        tm.assert_index_equal(result, expected)
        assert result.name == expected.name
        assert idx.tolist() == expected_list

        idx = PeriodIndex(['2013-01-01', '2013-01-02', 'NaT',
                           '2013-01-04'], freq='D', name='idx')
        expected_list = [pd.Period('2013-01-01', freq='D'),
                         pd.Period('2013-01-02', freq='D'),
                         pd.Period('NaT', freq='D'),
                         pd.Period('2013-01-04', freq='D')]
        expected = pd.Index(expected_list, dtype=object, name='idx')
        result = idx.astype(object)
        assert isinstance(result, Index)
        assert result.dtype == object
        tm.assert_index_equal(result, expected)
        # NaT compares unequal to itself, so check those slots by identity
        for i in [0, 1, 3]:
            assert result[i] == expected[i]
        assert result[2] is pd.NaT
        assert result.name == expected.name

        result_list = idx.tolist()
        for i in [0, 1, 3]:
            assert result_list[i] == expected_list[i]
        assert result_list[2] is pd.NaT
harisbal/pandas
pandas/tests/indexes/period/test_astype.py
pandas/core/categorical.py
"""Process This module implements a wrapper for basic ``subprocess.Popen`` functionality. """ from io import BytesIO from subprocess import Popen, PIPE from circuits.core.manager import TIMEOUT from circuits import handler, BaseComponent from .file import File from .events import started, stopped, write class Process(BaseComponent): channel = "process" def init(self, args, cwd=None, shell=False): self.args = args self.cwd = cwd self.shell = shell self.p = None self.stderr = BytesIO() self.stdout = BytesIO() self._status = None self._terminated = False self._stdout_closed = False self._stderr_closed = False self._stdin = None self._stderr = None self._stdout = None self._stdin_closed_handler = None self._stderr_read_handler = None self._stdout_read_handler = None self._stderr_closed_handler = None self._stdout_closed_handler = None def start(self): self.p = Popen( self.args, cwd=self.cwd, shell=self.shell, stdin=PIPE, stderr=PIPE, stdout=PIPE ) self.stderr = BytesIO() self.stdout = BytesIO() self._status = None self._stdin = File( self.p.stdin, channel="{0:d}.stdin".format(self.p.pid) ).register(self) self._stderr = File( self.p.stderr, channel="{0:d}.stderr".format(self.p.pid) ).register(self) self._stdout = File( self.p.stdout, channel="{0:d}.stdout".format(self.p.pid) ).register(self) self._stderr_read_handler = self.addHandler( handler("read", channel="{0:d}.stderr".format(self.p.pid))( self.__class__._on_stderr_read ) ) self._stdout_read_handler = self.addHandler( handler("read", channel="{0:d}.stdout".format(self.p.pid))( self.__class__._on_stdout_read ) ) self._stderr_closed_handler = self.addHandler( handler("closed", channel="{0:d}.stderr".format(self.p.pid))( self.__class__._on_stderr_closed ) ) self._stdout_closed_handler = self.addHandler( handler("closed", channel="{0:d}.stdout".format(self.p.pid))( self.__class__._on_stdout_closed ) ) self.fire(started(self)) @staticmethod def _on_stdout_closed(self): self._stdout_closed = True @staticmethod def 
_on_stderr_closed(self): self._stderr_closed = True def stop(self): if self.p is not None: self.p.terminate() def kill(self): self.p.kill() def signal(self, signal): self.p.send_signal(signal) def wait(self): return self.p.wait() def write(self, data): self.fire(write(data), "{0:d}.stdin".format(self.p.pid)) @property def status(self): if getattr(self, "p", None) is not None: return self.p.poll() @staticmethod def _on_stderr_read(self, data): self.stderr.write(data) @staticmethod def _on_stdout_read(self, data): self.stdout.write(data) @handler("generate_events") def _on_generate_events(self, event): if self.p is not None and self._status is None: self._status = self.p.poll() if self._status is not None and self._stderr_closed \ and self._stdout_closed and not self._terminated: self._terminated = True self.removeHandler(self._stderr_read_handler) self.removeHandler(self._stdout_read_handler) self.removeHandler(self._stderr_closed_handler) self.removeHandler(self._stdout_closed_handler) self.fire(stopped(self)) event.reduce_time_left(0) event.stop() else: event.reduce_time_left(TIMEOUT)
"""Timers Tests""" import pytest from time import time from operator import sub from itertools import starmap from datetime import datetime, timedelta from circuits.six.moves import map, zip from circuits import sleep, Event, Component, Timer @pytest.fixture def app(request, manager, watcher): app = App().register(manager) assert watcher.wait("registered") def finalizer(): app.unregister() assert watcher.wait("unregistered") request.addfinalizer(finalizer) return app class single(Event): """single Event""" complete = True class persistent(Event): """persistent Event""" complete = True class App(Component): def init(self): self.flag = False self.count = 0 self.timestamps = [] def single(self): self.timestamps.append(time()) self.count += 1 self.flag = True def persistent(self, interval): timer = Timer(interval, single(), persist=True) timer.register(self) yield sleep(interval * 10) timer.unregister() def test_single(app, watcher): Timer(0.1, single()).register(app) assert watcher.wait("single_complete") assert app.flag def test_persistent(app, watcher): exponent = -1 interval = 10.0 ** exponent app.fire(persistent(interval)) assert watcher.wait("persistent_complete") xs = list(map(abs, starmap(sub, zip(app.timestamps, app.timestamps[1:])))) avg = sum(xs) / len(xs) assert round(avg, abs(exponent)) == interval def test_datetime(app, watcher): now = datetime.now() d = now + timedelta(seconds=0.1) Timer(d, single()).register(app) assert watcher.wait("single_complete") assert app.flag
eriol/circuits
tests/core/test_timers.py
circuits/io/process.py
#!/usr/bin/env python # # Copyright 2015 Cisco Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """\ Dynamic inventory for Terraform - finds all `.tfstate` files below the working directory and generates an inventory based on them. """ from __future__ import unicode_literals, print_function import argparse from collections import defaultdict from functools import wraps import json import os import re import yaml VERSION = '0.3.0pre' def tfstates(root=None): root = root or os.getcwd() for dirpath, _, filenames in os.walk(root): for name in filenames: if os.path.splitext(name)[-1] == '.tfstate': yield os.path.join(dirpath, name) def iterresources(filenames): for filename in filenames: with open(filename, 'r') as json_file: state = json.load(json_file) for module in state['modules']: name = module['path'][-1] for key, resource in module['resources'].items(): yield name, key, resource ## READ RESOURCES PARSERS = {} def _clean_dc(dcname): # Consul DCs are strictly alphanumeric with underscores and hyphens - # ensure that the consul_dc attribute meets these requirements. 
return re.sub('[^\w_\-]', '-', dcname) def iterhosts(resources): '''yield host tuples of (name, attributes, groups)''' for module_name, key, resource in resources: resource_type, name = key.split('.', 1) try: parser = PARSERS[resource_type] except KeyError: continue yield parser(resource, module_name) def parses(prefix): def inner(func): PARSERS[prefix] = func return func return inner def calculate_mantl_vars(func): """calculate Mantl vars""" @wraps(func) def inner(*args, **kwargs): name, attrs, groups = func(*args, **kwargs) # attrs if attrs.get('role', '') == 'control': attrs['consul_is_server'] = True else: attrs['consul_is_server'] = False # groups if attrs.get('publicly_routable', False): groups.append('publicly_routable') return name, attrs, groups return inner def _get_ignore_blank(obj, key, default=None): """ Get a key in an object, but treat blank string as missing value. """ v = obj.get(key, default) if v == "": return default return v def _parse_prefix(source, prefix, sep='.'): for compkey, value in source.items(): try: curprefix, rest = compkey.split(sep, 1) except ValueError: continue if curprefix != prefix or rest == '#': continue yield rest, value def parse_attr_list(source, prefix, sep='.'): attrs = defaultdict(dict) for compkey, value in _parse_prefix(source, prefix, sep): idx, key = compkey.split(sep, 1) attrs[idx][key] = value return attrs.values() def parse_dict(source, prefix, sep='.'): return dict(_parse_prefix(source, prefix, sep)) def parse_list(source, prefix, sep='.'): return [value for _, value in _parse_prefix(source, prefix, sep)] def parse_bool(string_form): token = string_form.lower()[0] if token == 't': return True elif token == 'f': return False else: raise ValueError('could not convert %r to a bool' % string_form) @parses('triton_machine') @calculate_mantl_vars def triton_machine(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs.get('name') groups = [] attrs = { 'id': raw_attrs['id'], 'dataset': 
raw_attrs['dataset'], 'disk': raw_attrs['disk'], 'firewall_enabled': parse_bool(raw_attrs['firewall_enabled']), 'image': raw_attrs['image'], 'ips': parse_list(raw_attrs, 'ips'), 'memory': raw_attrs['memory'], 'name': raw_attrs['name'], 'networks': parse_list(raw_attrs, 'networks'), 'package': raw_attrs['package'], 'primary_ip': raw_attrs['primaryip'], 'root_authorized_keys': raw_attrs['root_authorized_keys'], 'state': raw_attrs['state'], 'tags': parse_dict(raw_attrs, 'tags'), 'type': raw_attrs['type'], 'user_data': raw_attrs['user_data'], 'user_script': raw_attrs['user_script'], # ansible 'ansible_ssh_host': raw_attrs['primaryip'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', # it's "root" on Triton by default # generic 'public_ipv4': raw_attrs['primaryip'], 'provider': 'triton', } # private IPv4 for ip in attrs['ips']: if ip.startswith('10') or ip.startswith('192.168'): # private IPs attrs['private_ipv4'] = ip break if 'private_ipv4' not in attrs: attrs['private_ipv4'] = attrs['public_ipv4'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['tags'].get('dc', 'none')), 'role': attrs['tags'].get('role', 'none'), 'ansible_python_interpreter': attrs['tags'].get('python_bin', 'python') }) # add groups based on attrs groups.append('triton_image=' + attrs['image']) groups.append('triton_package=' + attrs['package']) groups.append('triton_state=' + attrs['state']) groups.append('triton_firewall_enabled=%s' % attrs['firewall_enabled']) groups.extend('triton_tags_%s=%s' % item for item in attrs['tags'].items()) groups.extend('triton_network=' + network for network in attrs['networks']) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('digitalocean_droplet') @calculate_mantl_vars def digitalocean_host(resource, tfvars=None): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'image': 
raw_attrs['image'], 'ipv4_address': raw_attrs['ipv4_address'], 'locked': parse_bool(raw_attrs['locked']), 'metadata': json.loads(raw_attrs.get('user_data', '{}')), 'region': raw_attrs['region'], 'size': raw_attrs['size'], 'ssh_keys': parse_list(raw_attrs, 'ssh_keys'), 'status': raw_attrs['status'], # ansible 'ansible_ssh_host': raw_attrs['ipv4_address'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', # it's always "root" on DO # generic 'public_ipv4': raw_attrs['ipv4_address'], 'private_ipv4': raw_attrs.get('ipv4_address_private', raw_attrs['ipv4_address']), 'provider': 'digitalocean', } # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', attrs['region'])), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # add groups based on attrs groups.append('do_image=' + attrs['image']) groups.append('do_locked=%s' % attrs['locked']) groups.append('do_region=' + attrs['region']) groups.append('do_size=' + attrs['size']) groups.append('do_status=' + attrs['status']) groups.extend('do_metadata_%s=%s' % item for item in attrs['metadata'].items()) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('softlayer_virtualserver') @calculate_mantl_vars def softlayer_host(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ipv4_address': raw_attrs['ipv4_address'], 'metadata': json.loads(raw_attrs.get('user_data', '{}')), 'region': raw_attrs['region'], 'ram': raw_attrs['ram'], 'cpu': raw_attrs['cpu'], 'ssh_keys': parse_list(raw_attrs, 'ssh_keys'), 'public_ipv4': raw_attrs['ipv4_address'], 'private_ipv4': raw_attrs['ipv4_address_private'], 'ansible_ssh_host': raw_attrs['ipv4_address'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', 'provider': 'softlayer', } # 
attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', attrs['region'])), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('openstack_compute_instance_v2') @calculate_mantl_vars def openstack_host(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'access_ip_v4': raw_attrs['access_ip_v4'], 'access_ip_v6': raw_attrs['access_ip_v6'], 'flavor': parse_dict(raw_attrs, 'flavor', sep='_'), 'id': raw_attrs['id'], 'image': parse_dict(raw_attrs, 'image', sep='_'), 'key_pair': raw_attrs['key_pair'], 'metadata': parse_dict(raw_attrs, 'metadata'), 'network': parse_attr_list(raw_attrs, 'network'), 'region': raw_attrs.get('region', ''), 'security_groups': parse_list(raw_attrs, 'security_groups'), # ansible 'ansible_ssh_port': 22, # workaround for an OpenStack bug where hosts have a different domain # after they're restarted 'host_domain': 'novalocal', 'use_host_domain': True, # generic 'public_ipv4': raw_attrs['access_ip_v4'], 'private_ipv4': raw_attrs['access_ip_v4'], 'provider': 'openstack', } if 'floating_ip' in raw_attrs: attrs['private_ipv4'] = raw_attrs['network.0.fixed_ip_v4'] try: attrs.update({ 'ansible_host': raw_attrs['access_ip_v4'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({'ansible_host': '', 'publicly_routable': False}) # attrs specific to Ansible if 'metadata.ssh_user' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['metadata.ssh_user'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # add groups based on attrs 
groups.append('os_image=' + attrs['image']['name']) groups.append('os_flavor=' + attrs['flavor']['name']) groups.extend('os_metadata_%s=%s' % item for item in attrs['metadata'].items()) groups.append('os_region=' + attrs['region']) # groups specific to Mantl groups.append('role=' + attrs['metadata'].get('role', 'none')) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('aws_instance') @calculate_mantl_vars def aws_host(resource, module_name): name = resource['primary']['attributes']['tags.Name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'ami': raw_attrs['ami'], 'availability_zone': raw_attrs['availability_zone'], 'ebs_block_device': parse_attr_list(raw_attrs, 'ebs_block_device'), 'ebs_optimized': parse_bool(raw_attrs['ebs_optimized']), 'ephemeral_block_device': parse_attr_list(raw_attrs, 'ephemeral_block_device'), 'id': raw_attrs['id'], 'key_name': raw_attrs['key_name'], 'private': parse_dict(raw_attrs, 'private', sep='_'), 'public': parse_dict(raw_attrs, 'public', sep='_'), 'root_block_device': parse_attr_list(raw_attrs, 'root_block_device'), 'security_groups': parse_list(raw_attrs, 'security_groups'), 'subnet': parse_dict(raw_attrs, 'subnet', sep='_'), 'tags': parse_dict(raw_attrs, 'tags'), 'tenancy': raw_attrs['tenancy'], 'vpc_security_group_ids': parse_list(raw_attrs, 'vpc_security_group_ids'), # ansible-specific 'ansible_ssh_port': 22, 'ansible_ssh_host': raw_attrs['public_ip'], # generic 'public_ipv4': raw_attrs['public_ip'], 'private_ipv4': raw_attrs['private_ip'], 'provider': 'aws', } # attrs specific to Ansible if 'tags.sshUser' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['tags.sshUser'] if 'tags.sshPrivateIp' in raw_attrs: attrs['ansible_ssh_host'] = raw_attrs['private_ip'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['tags'].get('dc', module_name)), 'role': attrs['tags'].get('role', 'none'), 'ansible_python_interpreter': attrs['tags'].get('python_bin','python') }) # 
groups specific to Mantl groups.extend(['aws_ami=' + attrs['ami'], 'aws_az=' + attrs['availability_zone'], 'aws_key_name=' + attrs['key_name'], 'aws_tenancy=' + attrs['tenancy']]) groups.extend('aws_tag_%s=%s' % item for item in attrs['tags'].items()) groups.extend('aws_vpc_security_group=' + group for group in attrs['vpc_security_group_ids']) groups.extend('aws_subnet_%s=%s' % subnet for subnet in attrs['subnet'].items()) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('digitalocean_droplet') @calculate_mi_vars def digitalocean_host(resource, tfvars=None): raw_attrs = resource['primary']['attributes'] groups = [] # general attrs attrs = { 'name': raw_attrs['name'], 'metadata': yaml.load(raw_attrs['user_data']), 'region': raw_attrs['region'], 'size': raw_attrs['size'], # ansible 'ansible_port': 22, # Could be passed from the command line via environment variable 'ansible_user': 'root', 'ansible_host': raw_attrs['ipv4_address'], } # attrs specific to microservices-infrastructure attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', attrs['region'])), 'role': attrs['metadata'].get('role', 'none') }) # groups specific to microservices-infrastructure name = attrs.get('name') groups.append('region=' + attrs['region']) groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('google_compute_instance') @calculate_mantl_vars def gce_host(resource, module_name): name = resource['primary']['id'] raw_attrs = resource['primary']['attributes'] groups = [] # network interfaces interfaces = parse_attr_list(raw_attrs, 'network_interface') for interface in interfaces: interface['access_config'] = parse_attr_list(interface, 'access_config') for key in interface.keys(): if '.' 
in key: del interface[key] # general attrs attrs = { 'can_ip_forward': raw_attrs['can_ip_forward'] == 'true', 'disks': parse_attr_list(raw_attrs, 'disk'), 'machine_type': raw_attrs['machine_type'], 'metadata': parse_dict(raw_attrs, 'metadata'), 'network': parse_attr_list(raw_attrs, 'network'), 'network_interface': interfaces, 'self_link': raw_attrs['self_link'], 'service_account': parse_attr_list(raw_attrs, 'service_account'), 'tags': parse_list(raw_attrs, 'tags'), 'zone': raw_attrs['zone'], # ansible 'ansible_ssh_port': 22, 'provider': 'gce', } # attrs specific to Ansible if 'metadata.ssh_user' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['metadata.ssh_user'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) try: attrs.update({ 'ansible_ssh_host': interfaces[0]['access_config'][0]['nat_ip'] or interfaces[0]['access_config'][0]['assigned_nat_ip'], 'public_ipv4': interfaces[0]['access_config'][0]['nat_ip'] or interfaces[0]['access_config'][0]['assigned_nat_ip'], 'private_ipv4': interfaces[0]['address'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({'ansible_host': '', 'publicly_routable': False}) # add groups based on attrs groups.extend('gce_image=' + disk['image'] for disk in attrs['disks']) groups.append('gce_machine_type=' + attrs['machine_type']) groups.extend('gce_metadata_%s=%s' % (key, value) for (key, value) in attrs['metadata'].items() if key not in set(['sshKeys'])) groups.extend('gce_tag=' + tag for tag in attrs['tags']) groups.append('gce_zone=' + attrs['zone']) if attrs['can_ip_forward']: groups.append('gce_ip_forward') if attrs['publicly_routable']: groups.append('gce_publicly_routable') # groups specific to Mantl groups.append('role=' + attrs['metadata'].get('role', 'none')) groups.append('dc=' + attrs['consul_dc']) return name, attrs, 
groups @parses('azure_instance') @calculate_mi_vars def azure_host(resource, module_name): name = resource['primary']['attributes']['name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'automatic_updates': raw_attrs['automatic_updates'], 'description': raw_attrs['description'], 'hosted_service_name': raw_attrs['hosted_service_name'], 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ip_address': raw_attrs['ip_address'], 'location': raw_attrs['location'], 'name': raw_attrs['name'], 'reverse_dns': raw_attrs['reverse_dns'], 'security_group': raw_attrs['security_group'], 'size': raw_attrs['size'], 'ssh_key_thumbprint': raw_attrs['ssh_key_thumbprint'], 'subnet': raw_attrs['subnet'], 'username': raw_attrs['username'], 'vip_address': raw_attrs.get('vip_address'), 'virtual_network': raw_attrs.get('virtual_network'), 'endpoint': parse_attr_list(raw_attrs, 'endpoint'), # ansible 'ansible_port': 22, 'ansible_user': raw_attrs['username'], 'ansible_host': raw_attrs.get('vip_address', raw_attrs['ip_address']), } # attrs specific to microservices-infrastructure attrs.update({ 'consul_dc': attrs['location'].lower().replace(" ", "-"), 'role': attrs['description'] }) # groups specific to microservices-infrastructure groups.extend(['azure_image=' + attrs['image'], 'azure_location=' + attrs['location'].lower().replace(" ", "-"), 'azure_username=' + attrs['username'], 'azure_security_group=' + attrs['security_group']]) # groups specific to microservices-infrastructure groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('vsphere_virtual_machine') @calculate_mantl_vars def vsphere_host(resource, module_name): raw_attrs = resource['primary']['attributes'] network_attrs = parse_dict(raw_attrs, 'network_interface') network = parse_dict(network_attrs, '0') ip_address = network.get('ipv4_address', network['ip_address']) name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'ip_address': 
ip_address, 'private_ipv4': ip_address, 'public_ipv4': ip_address, 'metadata': parse_dict(raw_attrs, 'custom_configuration_parameters'), 'ansible_ssh_port': 22, 'provider': 'vsphere', } try: attrs.update({ 'ansible_ssh_host': ip_address, }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', }) attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('consul_dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # attrs specific to Ansible if 'ssh_user' in attrs['metadata']: attrs['ansible_ssh_user'] = attrs['metadata']['ssh_user'] groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('azurerm_virtual_machine') @calculate_mantl_vars def azurerm_host(resource, module_name): name = resource['primary']['attributes']['name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'id': raw_attrs['id'], 'name': raw_attrs['name'], # ansible 'ansible_ssh_port': 22, 'ansible_ssh_user': raw_attrs.get('tags.ssh_user', ''), 'ansible_ssh_host': raw_attrs.get('tags.ssh_ip', ''), } groups.append('role=' + raw_attrs.get('tags.role', '')) return name, attrs, groups @parses('azure_instance') @calculate_mantl_vars def azure_host(resource, module_name): name = resource['primary']['attributes']['name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'automatic_updates': raw_attrs['automatic_updates'], 'description': raw_attrs['description'], 'hosted_service_name': raw_attrs['hosted_service_name'], 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ip_address': raw_attrs['ip_address'], 'location': raw_attrs['location'], 'name': raw_attrs['name'], 'reverse_dns': raw_attrs['reverse_dns'], 'security_group': raw_attrs['security_group'], 'size': raw_attrs['size'], 'ssh_key_thumbprint': raw_attrs['ssh_key_thumbprint'], 'subnet': raw_attrs['subnet'], 'username': raw_attrs['username'], 
'vip_address': raw_attrs['vip_address'], 'virtual_network': raw_attrs['virtual_network'], 'endpoint': parse_attr_list(raw_attrs, 'endpoint'), # ansible 'ansible_ssh_port': 22, 'ansible_ssh_user': raw_attrs['username'], 'ansible_ssh_host': raw_attrs['vip_address'], } # attrs specific to mantl attrs.update({ 'consul_dc': attrs['location'].lower().replace(" ", "-"), 'role': attrs['description'] }) # groups specific to mantl groups.extend(['azure_image=' + attrs['image'], 'azure_location=' + attrs['location'].lower().replace(" ", "-"), 'azure_username=' + attrs['username'], 'azure_security_group=' + attrs['security_group']]) # groups specific to mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('clc_server') @calculate_mantl_vars def clc_server(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs.get('id') groups = [] md = parse_dict(raw_attrs, 'metadata') attrs = { 'metadata': md, 'ansible_ssh_port': md.get('ssh_port', 22), 'ansible_ssh_user': md.get('ssh_user', 'root'), 'provider': 'clc', 'publicly_routable': False, } try: attrs.update({ 'public_ipv4': raw_attrs['public_ip_address'], 'private_ipv4': raw_attrs['private_ip_address'], 'ansible_ssh_host': raw_attrs['public_ip_address'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({ 'ansible_ssh_host': raw_attrs['private_ip_address'], 'private_ipv4': raw_attrs['private_ip_address'], }) attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), }) groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('ucs_service_profile') @calculate_mantl_vars def ucs_host(resource, module_name): name = resource['primary']['id'] raw_attrs = resource['primary']['attributes'] groups = [] # general attrs attrs = { 'metadata': parse_dict(raw_attrs, 'metadata'), 'provider': 'ucs', 
} # attrs specific to mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), }) try: attrs.update({ 'ansible_ssh_host': raw_attrs['vNIC.0.ip'], 'public_ipv4': raw_attrs['vNIC.0.ip'], 'private_ipv4': raw_attrs['vNIC.0.ip'] }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', 'publicly_routable': False}) # add groups based on attrs groups.append('role=' + attrs['role']) #.get('role', 'none')) # groups.append('all:children') groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups ## QUERY TYPES def query_host(hosts, target): for name, attrs, _ in hosts: if name == target: return attrs return {} def query_list(hosts): groups = defaultdict(dict) meta = {} for name, attrs, hostgroups in hosts: for group in set(hostgroups): groups[group].setdefault('hosts', []) groups[group]['hosts'].append(name) meta[name] = attrs groups['_meta'] = {'hostvars': meta} return groups def query_hostfile(hosts): out = ['## begin hosts generated by terraform.py ##'] out.extend( '{}\t{}'.format(attrs['ansible_ssh_host'].ljust(16), name) for name, attrs, _ in hosts ) out.append('## end hosts generated by terraform.py ##') return '\n'.join(out) def main(): parser = argparse.ArgumentParser( __file__, __doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) modes = parser.add_mutually_exclusive_group(required=True) modes.add_argument('--list', action='store_true', help='list all variables') modes.add_argument('--host', help='list variables for a single host') modes.add_argument('--version', action='store_true', help='print version and exit') modes.add_argument('--hostfile', action='store_true', help='print hosts as a /etc/hosts snippet') parser.add_argument('--pretty', action='store_true', help='pretty-print output JSON') parser.add_argument('--nometa', action='store_true', help='with --list, exclude hostvars') default_root = os.environ.get('TERRAFORM_STATE_ROOT', 
os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', ))) parser.add_argument('--root', default=default_root, help='custom root to search for `.tfstate`s in') args = parser.parse_args() if args.version: print('%s %s' % (__file__, VERSION)) parser.exit() hosts = iterhosts(iterresources(tfstates(args.root))) if args.list: output = query_list(hosts) if args.nometa: del output['_meta'] print(json.dumps(output, indent=4 if args.pretty else None)) elif args.host: output = query_host(hosts, args.host) print(json.dumps(output, indent=4 if args.pretty else None)) elif args.hostfile: output = query_hostfile(hosts) print(output) parser.exit() if __name__ == '__main__': main()
# -*- coding: utf-8 -*- import pytest @pytest.fixture def openstack_host(): from terraform import openstack_host return openstack_host @pytest.fixture def openstack_resource(): return { "type": "openstack_compute_instance_v2", "primary": { "id": "81467bb5-e214-4764-b501-8b59892d88e2", "attributes": { "access_ip_v4": "173.39.243.27", "access_ip_v6": "", "flavor_id": "c6580dce-3cf4-488a-8eb0-8d3766c5e6f7", "flavor_name": "CO2-2XLarge", "id": "81467bb5-e214-4764-b501-8b59892d88e2", "image_id": "1bbd1f1f-d32c-43ca-a727-3b2b075c3e28", "image_name": "centos-7_x86_64-2015-01-27-v6", "key_pair": "ansible_pubkey_sborrelli", "metadata.#": "1", "metadata.role": "control", "name": "mi-control-01", "network.#": "1", "network.0.fixed_ip_v4": "173.39.243.27", "network.0.fixed_ip_v6": "", "network.0.mac": "fa:16:3e:12:13:d7", "network.0.name": "public-direct-600", "network.0.port": "", "network.0.uuid": "e7b1be4f-e0d2-4024-948f-b9e6c4911123", "region": "eu-amsterdam-1", "security_groups.#": "1", "security_groups.0": "default" } } } def test_name(openstack_resource, openstack_host): name, _, _ = openstack_host(openstack_resource, '') assert name == 'mi-control-01' @pytest.mark.parametrize('attr,should', { 'access_ip_v4': '173.39.243.27', 'access_ip_v6': '', 'flavor': { 'id': 'c6580dce-3cf4-488a-8eb0-8d3766c5e6f7', 'name': 'CO2-2XLarge', }, 'id': '81467bb5-e214-4764-b501-8b59892d88e2', 'image': { 'id': '1bbd1f1f-d32c-43ca-a727-3b2b075c3e28', 'name': 'centos-7_x86_64-2015-01-27-v6', }, 'key_pair': 'ansible_pubkey_sborrelli', 'metadata': {'role': 'control', }, 'network': [{ 'fixed_ip_v4': '173.39.243.27', 'fixed_ip_v6': '', 'mac': 'fa:16:3e:12:13:d7', 'name': 'public-direct-600', 'port': '', 'uuid': 'e7b1be4f-e0d2-4024-948f-b9e6c4911123' }], 'region': 'eu-amsterdam-1', 'security_groups': ['default'], # ansible 'ansible_ssh_port': 22, 'ansible_ssh_host': '173.39.243.27', 'publicly_routable': True, # mi 'consul_dc': 'module_name', 'role': 'control', # and the bugfix 'use_host_domain': 
True, 'host_domain': 'novalocal', # generic 'public_ipv4': '173.39.243.27', 'private_ipv4': '173.39.243.27', 'provider': 'openstack', }.items()) def test_attrs(openstack_resource, openstack_host, attr, should): _, attrs, _ = openstack_host(openstack_resource, 'module_name') assert attr in attrs assert attrs[attr] == should @pytest.mark.parametrize('group', [ 'os_image=centos-7_x86_64-2015-01-27-v6', 'os_flavor=CO2-2XLarge', 'os_metadata_role=control', 'os_region=eu-amsterdam-1', 'role=control', 'dc=module_name', ]) def test_groups(openstack_resource, openstack_host, group): _, _, groups = openstack_host(openstack_resource, 'module_name') assert group in groups
Capgemini/terraform.py
tests/test_openstack.py
terraform.py
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.15 (https://github.com/warner/python-versioneer) import errno import os import re import subprocess import sys from pandas.compat import PY3 def get_keywords(): # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig(object): pass def get_config(): # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "pandas-" cfg.versionfile_source = "pandas/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): pass LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run {dispcmd}".format(dispcmd=dispcmd)) 
print(e) return None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if PY3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run {dispcmd} (error)".format(dispcmd=dispcmd)) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '{root}', but '{dirname}' " "doesn't start with prefix '{parentdir_prefix}'".format( root=root, dirname=dirname, parentdir_prefix=parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '{}', no digits".format(",".join(refs - tags))) if verbose: print("likely tags: {}".format(",".join(sorted(tags)))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking {r}".format(r=r)) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in {root}".format(root=root)) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: " "'{describe_out}'".format( describe_out=describe_out)) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '{full_tag}' doesn't start with prefix " \ "'{tag_prefix}'" print(fmt.format(full_tag=full_tag, tag_prefix=tag_prefix)) pieces["error"] = ("tag '{full_tag}' doesn't start with " "prefix '{tag_prefix}'".format( full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "{:d}.g{}".format(pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.{:d}.g{}".format(pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post{:d}".format(pieces["distance"]) if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g{}".format(pieces["short"]) else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g{}".format(pieces["short"]) return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 
0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. The distance/hash is unconditional. # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '{style}'".format(style=style)) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": 
None} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"}
# -*- coding: utf-8 -*- from datetime import datetime import pytest import pytz from pandas.errors import NullFrequencyError import pandas as pd from pandas import DatetimeIndex, Series, date_range import pandas.util.testing as tm class TestDatetimeIndexArithmetic(object): # ------------------------------------------------------------- # DatetimeIndex.shift is used in integer addition def test_dti_shift_tzaware(self, tz_naive_fixture): # GH#9903 tz = tz_naive_fixture idx = pd.DatetimeIndex([], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(0, freq='H'), idx) tm.assert_index_equal(idx.shift(3, freq='H'), idx) idx = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-01 11:00', '2011-01-01 12:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(0, freq='H'), idx) exp = pd.DatetimeIndex(['2011-01-01 13:00', '2011-01-01 14:00', '2011-01-01 15:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(3, freq='H'), exp) exp = pd.DatetimeIndex(['2011-01-01 07:00', '2011-01-01 08:00', '2011-01-01 09:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(-3, freq='H'), exp) def test_dti_shift_freqs(self): # test shift for DatetimeIndex and non DatetimeIndex # GH#8083 drange = pd.date_range('20130101', periods=5) result = drange.shift(1) expected = pd.DatetimeIndex(['2013-01-02', '2013-01-03', '2013-01-04', '2013-01-05', '2013-01-06'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(-1) expected = pd.DatetimeIndex(['2012-12-31', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(3, freq='2D') expected = pd.DatetimeIndex(['2013-01-07', '2013-01-08', '2013-01-09', '2013-01-10', '2013-01-11'], freq='D') tm.assert_index_equal(result, expected) def test_dti_shift_int(self): rng = date_range('1/1/2000', periods=20) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): # GH#22535 result = rng + 5 expected = rng.shift(5) tm.assert_index_equal(result, expected) 
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): # GH#22535 result = rng - 5 expected = rng.shift(-5) tm.assert_index_equal(result, expected) def test_dti_shift_no_freq(self): # GH#19147 dti = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-01'], freq=None) with pytest.raises(NullFrequencyError): dti.shift(2) @pytest.mark.parametrize('tzstr', ['US/Eastern', 'dateutil/US/Eastern']) def test_dti_shift_localized(self, tzstr): dr = date_range('2011/1/1', '2012/1/1', freq='W-FRI') dr_tz = dr.tz_localize(tzstr) result = dr_tz.shift(1, '10T') assert result.tz == dr_tz.tz def test_dti_shift_across_dst(self): # GH 8616 idx = date_range('2013-11-03', tz='America/Chicago', periods=7, freq='H') s = Series(index=idx[:-1]) result = s.shift(freq='H') expected = Series(index=idx[1:]) tm.assert_series_equal(result, expected) @pytest.mark.parametrize('shift, result_time', [ [0, '2014-11-14 00:00:00'], [-1, '2014-11-13 23:00:00'], [1, '2014-11-14 01:00:00']]) def test_dti_shift_near_midnight(self, shift, result_time): # GH 8616 dt = datetime(2014, 11, 14, 0) dt_est = pytz.timezone('EST').localize(dt) s = Series(data=[1], index=[dt_est]) result = s.shift(shift, freq='H') expected = Series(1, index=DatetimeIndex([result_time], tz='EST')) tm.assert_series_equal(result, expected)
dsm054/pandas
pandas/tests/indexes/datetimes/test_arithmetic.py
pandas/_version.py
""" Read SAS7BDAT files Based on code written by Jared Hobbs: https://bitbucket.org/jaredhobbs/sas7bdat See also: https://github.com/BioStatMatt/sas7bdat Partial documentation of the file format: https://cran.r-project.org/web/packages/sas7bdat/vignettes/sas7bdat.pdf Reference for binary data compression: http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1992/9210/ross/ross.htm """ from datetime import datetime import struct import numpy as np from pandas.errors import EmptyDataError import pandas as pd from pandas import compat from pandas.io.common import BaseIterator, get_filepath_or_buffer from pandas.io.sas._sas import Parser import pandas.io.sas.sas_constants as const class _subheader_pointer(object): pass class _column(object): pass # SAS7BDAT represents a SAS data file in SAS7BDAT format. class SAS7BDATReader(BaseIterator): """ Read SAS files in SAS7BDAT format. Parameters ---------- path_or_buf : path name or buffer Name of SAS file or file-like object pointing to SAS file contents. index : column identifier, defaults to None Column to use as index. convert_dates : boolean, defaults to True Attempt to convert dates to Pandas datetime values. Note that some rarely used SAS date formats may be unsupported. blank_missing : boolean, defaults to True Convert empty strings to missing values (SAS uses blanks to indicate missing character variables). chunksize : int, defaults to None Return SAS7BDATReader object for iterations, returns chunks with given number of lines. encoding : string, defaults to None String encoding. convert_text : bool, defaults to True If False, text variables are left as raw bytes. convert_header_text : bool, defaults to True If False, header text, including column names, are left as raw bytes. 
""" def __init__(self, path_or_buf, index=None, convert_dates=True, blank_missing=True, chunksize=None, encoding=None, convert_text=True, convert_header_text=True): self.index = index self.convert_dates = convert_dates self.blank_missing = blank_missing self.chunksize = chunksize self.encoding = encoding self.convert_text = convert_text self.convert_header_text = convert_header_text self.default_encoding = "latin-1" self.compression = "" self.column_names_strings = [] self.column_names = [] self.column_formats = [] self.columns = [] self._current_page_data_subheader_pointers = [] self._cached_page = None self._column_data_lengths = [] self._column_data_offsets = [] self._column_types = [] self._current_row_in_file_index = 0 self._current_row_on_page_index = 0 self._current_row_in_file_index = 0 self._path_or_buf, _, _, _ = get_filepath_or_buffer(path_or_buf) if isinstance(self._path_or_buf, compat.string_types): self._path_or_buf = open(self._path_or_buf, 'rb') self.handle = self._path_or_buf self._get_properties() self._parse_metadata() def column_data_lengths(self): """Return a numpy int64 array of the column data lengths""" return np.asarray(self._column_data_lengths, dtype=np.int64) def column_data_offsets(self): """Return a numpy int64 array of the column offsets""" return np.asarray(self._column_data_offsets, dtype=np.int64) def column_types(self): """Returns a numpy character array of the column types: s (string) or d (double)""" return np.asarray(self._column_types, dtype=np.dtype('S1')) def close(self): try: self.handle.close() except AttributeError: pass def _get_properties(self): # Check magic number self._path_or_buf.seek(0) self._cached_page = self._path_or_buf.read(288) if self._cached_page[0:len(const.magic)] != const.magic: self.close() raise ValueError("magic number mismatch (not a SAS file?)") # Get alignment information align1, align2 = 0, 0 buf = self._read_bytes(const.align_1_offset, const.align_1_length) if buf == const.u64_byte_checker_value: 
align2 = const.align_2_value self.U64 = True self._int_length = 8 self._page_bit_offset = const.page_bit_offset_x64 self._subheader_pointer_length = const.subheader_pointer_length_x64 else: self.U64 = False self._page_bit_offset = const.page_bit_offset_x86 self._subheader_pointer_length = const.subheader_pointer_length_x86 self._int_length = 4 buf = self._read_bytes(const.align_2_offset, const.align_2_length) if buf == const.align_1_checker_value: align1 = const.align_2_value total_align = align1 + align2 # Get endianness information buf = self._read_bytes(const.endianness_offset, const.endianness_length) if buf == b'\x01': self.byte_order = "<" else: self.byte_order = ">" # Get encoding information buf = self._read_bytes(const.encoding_offset, const.encoding_length)[0] if buf in const.encoding_names: self.file_encoding = const.encoding_names[buf] else: self.file_encoding = "unknown (code=%s)" % str(buf) # Get platform information buf = self._read_bytes(const.platform_offset, const.platform_length) if buf == b'1': self.platform = "unix" elif buf == b'2': self.platform = "windows" else: self.platform = "unknown" buf = self._read_bytes(const.dataset_offset, const.dataset_length) self.name = buf.rstrip(b'\x00 ') if self.convert_header_text: self.name = self.name.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.file_type_offset, const.file_type_length) self.file_type = buf.rstrip(b'\x00 ') if self.convert_header_text: self.file_type = self.file_type.decode( self.encoding or self.default_encoding) # Timestamp is epoch 01/01/1960 epoch = datetime(1960, 1, 1) x = self._read_float(const.date_created_offset + align1, const.date_created_length) self.date_created = epoch + pd.to_timedelta(x, unit='s') x = self._read_float(const.date_modified_offset + align1, const.date_modified_length) self.date_modified = epoch + pd.to_timedelta(x, unit='s') self.header_length = self._read_int(const.header_size_offset + align1, const.header_size_length) # Read the 
rest of the header into cached_page. buf = self._path_or_buf.read(self.header_length - 288) self._cached_page += buf if len(self._cached_page) != self.header_length: self.close() raise ValueError("The SAS7BDAT file appears to be truncated.") self._page_length = self._read_int(const.page_size_offset + align1, const.page_size_length) self._page_count = self._read_int(const.page_count_offset + align1, const.page_count_length) buf = self._read_bytes(const.sas_release_offset + total_align, const.sas_release_length) self.sas_release = buf.rstrip(b'\x00 ') if self.convert_header_text: self.sas_release = self.sas_release.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.sas_server_type_offset + total_align, const.sas_server_type_length) self.server_type = buf.rstrip(b'\x00 ') if self.convert_header_text: self.server_type = self.server_type.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.os_version_number_offset + total_align, const.os_version_number_length) self.os_version = buf.rstrip(b'\x00 ') if self.convert_header_text: self.os_version = self.os_version.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.os_name_offset + total_align, const.os_name_length) buf = buf.rstrip(b'\x00 ') if len(buf) > 0: self.os_name = buf.decode(self.encoding or self.default_encoding) else: buf = self._read_bytes(const.os_maker_offset + total_align, const.os_maker_length) self.os_name = buf.rstrip(b'\x00 ') if self.convert_header_text: self.os_name = self.os_name.decode( self.encoding or self.default_encoding) def __next__(self): da = self.read(nrows=self.chunksize or 1) if da is None: raise StopIteration return da # Read a single float of the given width (4 or 8). 
def _read_float(self, offset, width): if width not in (4, 8): self.close() raise ValueError("invalid float width") buf = self._read_bytes(offset, width) fd = "f" if width == 4 else "d" return struct.unpack(self.byte_order + fd, buf)[0] # Read a single signed integer of the given width (1, 2, 4 or 8). def _read_int(self, offset, width): if width not in (1, 2, 4, 8): self.close() raise ValueError("invalid int width") buf = self._read_bytes(offset, width) it = {1: "b", 2: "h", 4: "l", 8: "q"}[width] iv = struct.unpack(self.byte_order + it, buf)[0] return iv def _read_bytes(self, offset, length): if self._cached_page is None: self._path_or_buf.seek(offset) buf = self._path_or_buf.read(length) if len(buf) < length: self.close() msg = "Unable to read {:d} bytes from file position {:d}." raise ValueError(msg.format(length, offset)) return buf else: if offset + length > len(self._cached_page): self.close() raise ValueError("The cached page is too small.") return self._cached_page[offset:offset + length] def _parse_metadata(self): done = False while not done: self._cached_page = self._path_or_buf.read(self._page_length) if len(self._cached_page) <= 0: break if len(self._cached_page) != self._page_length: self.close() raise ValueError( "Failed to read a meta data page from the SAS file.") done = self._process_page_meta() def _process_page_meta(self): self._read_page_header() pt = [const.page_meta_type, const.page_amd_type] + const.page_mix_types if self._current_page_type in pt: self._process_page_metadata() is_data_page = self._current_page_type & const.page_data_type is_mix_page = self._current_page_type in const.page_mix_types return (is_data_page or is_mix_page or self._current_page_data_subheader_pointers != []) def _read_page_header(self): bit_offset = self._page_bit_offset tx = const.page_type_offset + bit_offset self._current_page_type = self._read_int(tx, const.page_type_length) tx = const.block_count_offset + bit_offset self._current_page_block_count = 
self._read_int( tx, const.block_count_length) tx = const.subheader_count_offset + bit_offset self._current_page_subheaders_count = ( self._read_int(tx, const.subheader_count_length)) def _process_page_metadata(self): bit_offset = self._page_bit_offset for i in range(self._current_page_subheaders_count): pointer = self._process_subheader_pointers( const.subheader_pointers_offset + bit_offset, i) if pointer.length == 0: continue if pointer.compression == const.truncated_subheader_id: continue subheader_signature = self._read_subheader_signature( pointer.offset) subheader_index = ( self._get_subheader_index(subheader_signature, pointer.compression, pointer.ptype)) self._process_subheader(subheader_index, pointer) def _get_subheader_index(self, signature, compression, ptype): index = const.subheader_signature_to_index.get(signature) if index is None: f1 = ((compression == const.compressed_subheader_id) or (compression == 0)) f2 = (ptype == const.compressed_subheader_type) if (self.compression != "") and f1 and f2: index = const.SASIndex.data_subheader_index else: self.close() raise ValueError("Unknown subheader signature") return index def _process_subheader_pointers(self, offset, subheader_pointer_index): subheader_pointer_length = self._subheader_pointer_length total_offset = (offset + subheader_pointer_length * subheader_pointer_index) subheader_offset = self._read_int(total_offset, self._int_length) total_offset += self._int_length subheader_length = self._read_int(total_offset, self._int_length) total_offset += self._int_length subheader_compression = self._read_int(total_offset, 1) total_offset += 1 subheader_type = self._read_int(total_offset, 1) x = _subheader_pointer() x.offset = subheader_offset x.length = subheader_length x.compression = subheader_compression x.ptype = subheader_type return x def _read_subheader_signature(self, offset): subheader_signature = self._read_bytes(offset, self._int_length) return subheader_signature def _process_subheader(self, 
subheader_index, pointer): offset = pointer.offset length = pointer.length if subheader_index == const.SASIndex.row_size_index: processor = self._process_rowsize_subheader elif subheader_index == const.SASIndex.column_size_index: processor = self._process_columnsize_subheader elif subheader_index == const.SASIndex.column_text_index: processor = self._process_columntext_subheader elif subheader_index == const.SASIndex.column_name_index: processor = self._process_columnname_subheader elif subheader_index == const.SASIndex.column_attributes_index: processor = self._process_columnattributes_subheader elif subheader_index == const.SASIndex.format_and_label_index: processor = self._process_format_subheader elif subheader_index == const.SASIndex.column_list_index: processor = self._process_columnlist_subheader elif subheader_index == const.SASIndex.subheader_counts_index: processor = self._process_subheader_counts elif subheader_index == const.SASIndex.data_subheader_index: self._current_page_data_subheader_pointers.append(pointer) return else: raise ValueError("unknown subheader index") processor(offset, length) def _process_rowsize_subheader(self, offset, length): int_len = self._int_length lcs_offset = offset lcp_offset = offset if self.U64: lcs_offset += 682 lcp_offset += 706 else: lcs_offset += 354 lcp_offset += 378 self.row_length = self._read_int( offset + const.row_length_offset_multiplier * int_len, int_len) self.row_count = self._read_int( offset + const.row_count_offset_multiplier * int_len, int_len) self.col_count_p1 = self._read_int( offset + const.col_count_p1_multiplier * int_len, int_len) self.col_count_p2 = self._read_int( offset + const.col_count_p2_multiplier * int_len, int_len) mx = const.row_count_on_mix_page_offset_multiplier * int_len self._mix_page_row_count = self._read_int(offset + mx, int_len) self._lcs = self._read_int(lcs_offset, 2) self._lcp = self._read_int(lcp_offset, 2) def _process_columnsize_subheader(self, offset, length): int_len = 
self._int_length offset += int_len self.column_count = self._read_int(offset, int_len) if (self.col_count_p1 + self.col_count_p2 != self.column_count): print("Warning: column count mismatch (%d + %d != %d)\n", self.col_count_p1, self.col_count_p2, self.column_count) # Unknown purpose def _process_subheader_counts(self, offset, length): pass def _process_columntext_subheader(self, offset, length): offset += self._int_length text_block_size = self._read_int(offset, const.text_block_size_length) buf = self._read_bytes(offset, text_block_size) cname_raw = buf[0:text_block_size].rstrip(b"\x00 ") cname = cname_raw if self.convert_header_text: cname = cname.decode(self.encoding or self.default_encoding) self.column_names_strings.append(cname) if len(self.column_names_strings) == 1: compression_literal = "" for cl in const.compression_literals: if cl in cname_raw: compression_literal = cl self.compression = compression_literal offset -= self._int_length offset1 = offset + 16 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) compression_literal = buf.rstrip(b"\x00") if compression_literal == "": self._lcs = 0 offset1 = offset + 32 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) self.creator_proc = buf[0:self._lcp] elif compression_literal == const.rle_compression: offset1 = offset + 40 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) self.creator_proc = buf[0:self._lcp] elif self._lcs > 0: self._lcp = 0 offset1 = offset + 16 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcs) self.creator_proc = buf[0:self._lcp] if self.convert_header_text: if hasattr(self, "creator_proc"): self.creator_proc = self.creator_proc.decode( self.encoding or self.default_encoding) def _process_columnname_subheader(self, offset, length): int_len = self._int_length offset += int_len column_name_pointers_count = (length - 2 * int_len - 12) // 8 for i in range(column_name_pointers_count): text_subheader = offset + 
const.column_name_pointer_length * \ (i + 1) + const.column_name_text_subheader_offset col_name_offset = offset + const.column_name_pointer_length * \ (i + 1) + const.column_name_offset_offset col_name_length = offset + const.column_name_pointer_length * \ (i + 1) + const.column_name_length_offset idx = self._read_int( text_subheader, const.column_name_text_subheader_length) col_offset = self._read_int( col_name_offset, const.column_name_offset_length) col_len = self._read_int( col_name_length, const.column_name_length_length) name_str = self.column_names_strings[idx] self.column_names.append(name_str[col_offset:col_offset + col_len]) def _process_columnattributes_subheader(self, offset, length): int_len = self._int_length column_attributes_vectors_count = ( length - 2 * int_len - 12) // (int_len + 8) for i in range(column_attributes_vectors_count): col_data_offset = (offset + int_len + const.column_data_offset_offset + i * (int_len + 8)) col_data_len = (offset + 2 * int_len + const.column_data_length_offset + i * (int_len + 8)) col_types = (offset + 2 * int_len + const.column_type_offset + i * (int_len + 8)) x = self._read_int(col_data_offset, int_len) self._column_data_offsets.append(x) x = self._read_int(col_data_len, const.column_data_length_length) self._column_data_lengths.append(x) x = self._read_int(col_types, const.column_type_length) self._column_types.append(b'd' if x == 1 else b's') def _process_columnlist_subheader(self, offset, length): # unknown purpose pass def _process_format_subheader(self, offset, length): int_len = self._int_length text_subheader_format = ( offset + const.column_format_text_subheader_index_offset + 3 * int_len) col_format_offset = (offset + const.column_format_offset_offset + 3 * int_len) col_format_len = (offset + const.column_format_length_offset + 3 * int_len) text_subheader_label = ( offset + const.column_label_text_subheader_index_offset + 3 * int_len) col_label_offset = (offset + const.column_label_offset_offset + 3 * 
int_len) col_label_len = offset + const.column_label_length_offset + 3 * int_len x = self._read_int(text_subheader_format, const.column_format_text_subheader_index_length) format_idx = min(x, len(self.column_names_strings) - 1) format_start = self._read_int( col_format_offset, const.column_format_offset_length) format_len = self._read_int( col_format_len, const.column_format_length_length) label_idx = self._read_int( text_subheader_label, const.column_label_text_subheader_index_length) label_idx = min(label_idx, len(self.column_names_strings) - 1) label_start = self._read_int( col_label_offset, const.column_label_offset_length) label_len = self._read_int(col_label_len, const.column_label_length_length) label_names = self.column_names_strings[label_idx] column_label = label_names[label_start: label_start + label_len] format_names = self.column_names_strings[format_idx] column_format = format_names[format_start: format_start + format_len] current_column_number = len(self.columns) col = _column() col.col_id = current_column_number col.name = self.column_names[current_column_number] col.label = column_label col.format = column_format col.ctype = self._column_types[current_column_number] col.length = self._column_data_lengths[current_column_number] self.column_formats.append(column_format) self.columns.append(col) def read(self, nrows=None): if (nrows is None) and (self.chunksize is not None): nrows = self.chunksize elif nrows is None: nrows = self.row_count if len(self._column_types) == 0: self.close() raise EmptyDataError("No columns to parse from file") if self._current_row_in_file_index >= self.row_count: return None m = self.row_count - self._current_row_in_file_index if nrows > m: nrows = m nd = self._column_types.count(b'd') ns = self._column_types.count(b's') self._string_chunk = np.empty((ns, nrows), dtype=np.object) self._byte_chunk = np.zeros((nd, 8 * nrows), dtype=np.uint8) self._current_row_in_chunk_index = 0 p = Parser(self) p.read(nrows) rslt = 
self._chunk_to_dataframe() if self.index is not None: rslt = rslt.set_index(self.index) return rslt def _read_next_page(self): self._current_page_data_subheader_pointers = [] self._cached_page = self._path_or_buf.read(self._page_length) if len(self._cached_page) <= 0: return True elif len(self._cached_page) != self._page_length: self.close() msg = ("failed to read complete page from file " "(read {:d} of {:d} bytes)") raise ValueError(msg.format(len(self._cached_page), self._page_length)) self._read_page_header() page_type = self._current_page_type if page_type == const.page_meta_type: self._process_page_metadata() is_data_page = page_type & const.page_data_type pt = [const.page_meta_type] + const.page_mix_types if not is_data_page and self._current_page_type not in pt: return self._read_next_page() return False def _chunk_to_dataframe(self): n = self._current_row_in_chunk_index m = self._current_row_in_file_index ix = range(m - n, m) rslt = pd.DataFrame(index=ix) js, jb = 0, 0 for j in range(self.column_count): name = self.column_names[j] if self._column_types[j] == b'd': rslt[name] = self._byte_chunk[jb, :].view( dtype=self.byte_order + 'd') rslt[name] = np.asarray(rslt[name], dtype=np.float64) if self.convert_dates: unit = None if self.column_formats[j] in const.sas_date_formats: unit = 'd' elif self.column_formats[j] in const.sas_datetime_formats: unit = 's' if unit: rslt[name] = pd.to_datetime(rslt[name], unit=unit, origin="1960-01-01") jb += 1 elif self._column_types[j] == b's': rslt[name] = self._string_chunk[js, :] if self.convert_text and (self.encoding is not None): rslt[name] = rslt[name].str.decode( self.encoding or self.default_encoding) if self.blank_missing: ii = rslt[name].str.len() == 0 rslt.loc[ii, name] = np.nan js += 1 else: self.close() raise ValueError("unknown column type %s" % self._column_types[j]) return rslt
# -*- coding: utf-8 -*- from datetime import datetime import pytest import pytz from pandas.errors import NullFrequencyError import pandas as pd from pandas import DatetimeIndex, Series, date_range import pandas.util.testing as tm class TestDatetimeIndexArithmetic(object): # ------------------------------------------------------------- # DatetimeIndex.shift is used in integer addition def test_dti_shift_tzaware(self, tz_naive_fixture): # GH#9903 tz = tz_naive_fixture idx = pd.DatetimeIndex([], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(0, freq='H'), idx) tm.assert_index_equal(idx.shift(3, freq='H'), idx) idx = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-01 11:00', '2011-01-01 12:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(0, freq='H'), idx) exp = pd.DatetimeIndex(['2011-01-01 13:00', '2011-01-01 14:00', '2011-01-01 15:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(3, freq='H'), exp) exp = pd.DatetimeIndex(['2011-01-01 07:00', '2011-01-01 08:00', '2011-01-01 09:00'], name='xxx', tz=tz) tm.assert_index_equal(idx.shift(-3, freq='H'), exp) def test_dti_shift_freqs(self): # test shift for DatetimeIndex and non DatetimeIndex # GH#8083 drange = pd.date_range('20130101', periods=5) result = drange.shift(1) expected = pd.DatetimeIndex(['2013-01-02', '2013-01-03', '2013-01-04', '2013-01-05', '2013-01-06'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(-1) expected = pd.DatetimeIndex(['2012-12-31', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(3, freq='2D') expected = pd.DatetimeIndex(['2013-01-07', '2013-01-08', '2013-01-09', '2013-01-10', '2013-01-11'], freq='D') tm.assert_index_equal(result, expected) def test_dti_shift_int(self): rng = date_range('1/1/2000', periods=20) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): # GH#22535 result = rng + 5 expected = rng.shift(5) tm.assert_index_equal(result, expected) 
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): # GH#22535 result = rng - 5 expected = rng.shift(-5) tm.assert_index_equal(result, expected) def test_dti_shift_no_freq(self): # GH#19147 dti = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-01'], freq=None) with pytest.raises(NullFrequencyError): dti.shift(2) @pytest.mark.parametrize('tzstr', ['US/Eastern', 'dateutil/US/Eastern']) def test_dti_shift_localized(self, tzstr): dr = date_range('2011/1/1', '2012/1/1', freq='W-FRI') dr_tz = dr.tz_localize(tzstr) result = dr_tz.shift(1, '10T') assert result.tz == dr_tz.tz def test_dti_shift_across_dst(self): # GH 8616 idx = date_range('2013-11-03', tz='America/Chicago', periods=7, freq='H') s = Series(index=idx[:-1]) result = s.shift(freq='H') expected = Series(index=idx[1:]) tm.assert_series_equal(result, expected) @pytest.mark.parametrize('shift, result_time', [ [0, '2014-11-14 00:00:00'], [-1, '2014-11-13 23:00:00'], [1, '2014-11-14 01:00:00']]) def test_dti_shift_near_midnight(self, shift, result_time): # GH 8616 dt = datetime(2014, 11, 14, 0) dt_est = pytz.timezone('EST').localize(dt) s = Series(data=[1], index=[dt_est]) result = s.shift(shift, freq='H') expected = Series(1, index=DatetimeIndex([result_time], tz='EST')) tm.assert_series_equal(result, expected)
dsm054/pandas
pandas/tests/indexes/datetimes/test_arithmetic.py
pandas/io/sas/sas7bdat.py
""" Copyright (c) 2017 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import print_function, unicode_literals import os import re import logging import warnings from collections import namedtuple try: import dockpulp from dockpulp import setup_logger except (ImportError, SyntaxError): dockpulp = None logger = logging.getLogger(__name__) PulpRepo = namedtuple('PulpRepo', ['registry_id', 'tags']) # let's silence warnings from dockpulp: there is one warning for every request # which may result in tenths of messages: very annoying # with "module", it just prints one warning -- this should balance security and UX warnings.filterwarnings("module") class PulpHandler(object): CER = 'pulp.cer' KEY = 'pulp.key' def __init__(self, workflow, pulp_instance, log, pulp_secret_path=None, username=None, password=None, dockpulp_loglevel=None): self.workflow = workflow self.pulp_instance = pulp_instance self.pulp_secret_path = pulp_secret_path self.log = log # U/N & password has bigger prio than secret cert self.username = username self.password = password self.p = None if dockpulp_loglevel is not None: logger = setup_logger(dockpulp.log) try: logger.setLevel(dockpulp_loglevel) except (ValueError, TypeError) as ex: self.log.error("Can't set provided log level %r: %r", dockpulp_loglevel, ex) def check_file(self, filename): # Sanity-check image manifest = dockpulp.imgutils.get_manifest(filename) vers = dockpulp.imgutils.get_versions(manifest) for _, version in vers.items(): verparts = version.split('.') major = int(verparts[0]) if major < 1: minor = 0 if len(verparts) > 1: minor = int(verparts[1]) if minor < 10: raise RuntimeError('An image layer uses an unsupported ' 'version of docker (%s)' % version) r_chk = dockpulp.imgutils.check_repo(filename) if r_chk == 1: raise RuntimeError('Image is missing a /repositories file') elif r_chk == 2: raise RuntimeError('Pulp 
demands exactly 1 repo in /repositories') elif r_chk == 3: raise RuntimeError('/repositories references external images') def _set_auth(self): # The pulp.cer and pulp.key values must be set in a # 'Secret'-type resource and mounted somewhere we can get at them. if self.username and self.password: self.p.login(self.username, self.password) elif self.pulp_secret_path or 'SOURCE_SECRET_PATH' in os.environ: if self.pulp_secret_path is not None: path = self.pulp_secret_path self.log.info("using configured path %s for secrets", path) else: path = os.environ["SOURCE_SECRET_PATH"] self.log.info("SOURCE_SECRET_PATH=%s from environment", path) # Work out the pathnames for the certificate/key pair. cer = os.path.join(path, self.CER) key = os.path.join(path, self.KEY) if not os.path.exists(cer): raise RuntimeError("Certificate does not exist.") if not os.path.exists(key): raise RuntimeError("Key does not exist.") # Tell dockpulp. self.p.set_certs(cer, key) def _create_missing_repos(self, pulp_repos, repo_prefix): repos = pulp_repos.keys() found_repos = self.p.getRepos(repos, fields=["id"]) found_repo_ids = [repo["id"] for repo in found_repos] missing_repos = set(repos) - set(found_repo_ids) self.log.info("Missing repos: %s" % ", ".join(missing_repos)) for repo in missing_repos: self.p.createRepo(repo, None, registry_id=pulp_repos[repo].registry_id, prefix_with=repo_prefix) def get_tar_metadata(self, tarfile): metadata = dockpulp.imgutils.get_metadata(tarfile) pulp_md = dockpulp.imgutils.get_metadata_pulp(metadata) layers = pulp_md.keys() top_layer = dockpulp.imgutils.get_top_layer(pulp_md) return top_layer, layers def create_dockpulp(self): self.p = dockpulp.Pulp(env=self.pulp_instance) self._set_auth() def create_dockpulp_and_repos(self, image_names, repo_prefix="redhat-"): self.create_dockpulp() # pulp_repos is mapping from repo-ids to registry-ids and tags # which should be applied to those repos, expected structure: # { # "my-image": PulpRepo(registry_id="nick/my-image", 
tags=["v1", "latest"]) # ... # } pulp_repos = {} for image in image_names: repo_id = image.pulp_repo self.log.info("adding repo %s", repo_id) tag = image.tag if image.tag else 'latest' if repo_prefix: repo_id = repo_prefix + repo_id if repo_id in pulp_repos: pulp_repos[repo_id].tags.append(tag) else: pulp_repos[repo_id] = PulpRepo( registry_id=image.to_str(registry=False, tag=False), tags=[tag] ) self._create_missing_repos(pulp_repos, repo_prefix) return pulp_repos def get_image_ids_existing(self, layers): return self.p.getImageIdsExist(layers) def upload(self, filename): self.p.upload(filename) def copy(self, repo_id, layer): self.p.copy(repo_id, layer) def update_repo(self, repo_id, tag): self.p.updateRepo(repo_id, tag) def remove_image(self, repo_id, image): self.p.remove(repo_id, image) def publish(self, keys): # dockpulp will call publish for every repository if len(keys) == 0 # so check to make sure keys has values assert keys task_ids = self.p.crane(keys, wait=True) self.log.info("waiting for repos to be published to crane, tasks: %s", ", ".join(map(str, task_ids))) self.p.watch_tasks(task_ids) def get_registry_hostname(self): return re.sub(r'^https?://([^/]*)/?.*', lambda m: m.groups()[0], self.p.registry) def get_pulp_instance(self): return self.pulp_instance
""" Copyright (c) 2015 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import unicode_literals import json import os import tempfile import pytest import requests import responses from requests.exceptions import ConnectionError import six from tempfile import mkdtemp from textwrap import dedent from flexmock import flexmock from collections import OrderedDict import docker from atomic_reactor.build import BuildResult from atomic_reactor.constants import (IMAGE_TYPE_DOCKER_ARCHIVE, IMAGE_TYPE_OCI, IMAGE_TYPE_OCI_TAR) from atomic_reactor.inner import DockerBuildWorkflow from atomic_reactor.util import (ImageName, wait_for_command, clone_git_repo, LazyGit, figure_out_build_file, render_yum_repo, process_substitutions, get_checksums, print_version_of_tools, get_version_of_tools, get_preferred_label_key, human_size, CommandResult, get_manifest_digests, ManifestDigest, get_build_json, is_scratch_build, df_parser, are_plugins_in_order, LabelFormatter, get_manifest_media_type, get_retrying_requests_session, get_primary_images, get_image_upload_filename) from atomic_reactor import util from tests.constants import (DOCKERFILE_GIT, FLATPAK_GIT, INPUT_IMAGE, MOCK, DOCKERFILE_SHA1, MOCK_SOURCE) from atomic_reactor.constants import INSPECT_CONFIG from tests.util import requires_internet if MOCK: from tests.docker_mock import mock_docker from tests.retry_mock import mock_get_retry_session TEST_DATA = { "repository.com/image-name": ImageName(registry="repository.com", repo="image-name"), "repository.com/prefix/image-name:1": ImageName(registry="repository.com", namespace="prefix", repo="image-name", tag="1"), "repository.com/prefix/image-name@sha256:12345": ImageName(registry="repository.com", namespace="prefix", repo="image-name", tag="sha256:12345"), "repository.com/prefix/image-name": ImageName(registry="repository.com", namespace="prefix", 
repo="image-name"), "image-name": ImageName(repo="image-name"), "registry:5000/image-name:latest": ImageName(registry="registry:5000", repo="image-name", tag="latest"), "registry:5000/image-name@sha256:12345": ImageName(registry="registry:5000", repo="image-name", tag="sha256:12345"), "registry:5000/image-name": ImageName(registry="registry:5000", repo="image-name"), "fedora:20": ImageName(repo="fedora", tag="20"), "fedora@sha256:12345": ImageName(repo="fedora", tag="sha256:12345"), "prefix/image-name:1": ImageName(namespace="prefix", repo="image-name", tag="1"), "prefix/image-name@sha256:12345": ImageName(namespace="prefix", repo="image-name", tag="sha256:12345"), "library/fedora:20": ImageName(namespace="library", repo="fedora", tag="20"), "library/fedora@sha256:12345": ImageName(namespace="library", repo="fedora", tag="sha256:12345"), } def test_image_name_parse(): for inp, parsed in TEST_DATA.items(): assert ImageName.parse(inp) == parsed def test_image_name_format(): for expected, image_name in TEST_DATA.items(): assert image_name.to_str() == expected def test_image_name_comparison(): # make sure that both "==" and "!=" are implemented right on both Python major releases i1 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='1') i2 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='1') assert i1 == i2 assert not i1 != i2 i2 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='2') assert not i1 == i2 assert i1 != i2 def test_wait_for_command(): if MOCK: mock_docker() d = docker.APIClient() logs_gen = d.pull(INPUT_IMAGE, decode=True, stream=True) assert wait_for_command(logs_gen) is not None @requires_internet def test_clone_git_repo(tmpdir): tmpdir_path = str(tmpdir.realpath()) commit_id = clone_git_repo(DOCKERFILE_GIT, tmpdir_path) assert commit_id is not None assert len(commit_id) == 40 # current git hashes are this long assert os.path.isdir(os.path.join(tmpdir_path, '.git')) class TestCommandResult(object): 
@pytest.mark.parametrize(('item', 'expected'), [ ({"stream": "Step 0 : FROM ebbc51b7dfa5bcd993a[...]"}, "Step 0 : FROM ebbc51b7dfa5bcd993a[...]"), ('this is not valid JSON', 'this is not valid JSON'), ]) def test_parse_item(self, item, expected): cr = CommandResult() cr.parse_item(item) assert cr.logs == [expected] @requires_internet def test_clone_git_repo_by_sha1(tmpdir): tmpdir_path = str(tmpdir.realpath()) commit_id = clone_git_repo(DOCKERFILE_GIT, tmpdir_path, commit=DOCKERFILE_SHA1) assert commit_id is not None assert six.text_type(commit_id, encoding="ascii") == six.text_type(DOCKERFILE_SHA1) assert len(commit_id) == 40 # current git hashes are this long assert os.path.isdir(os.path.join(tmpdir_path, '.git')) @requires_internet @pytest.mark.parametrize('repository,expected_path', [ (DOCKERFILE_GIT, "Dockerfile"), (FLATPAK_GIT, "flatpak.json"), ]) def test_figure_out_build_file(tmpdir, repository, expected_path): tmpdir_path = str(tmpdir.realpath()) clone_git_repo(repository, tmpdir_path) path, dir = figure_out_build_file(tmpdir_path) assert path == os.path.join(tmpdir_path, expected_path) assert os.path.isfile(path) assert os.path.isdir(dir) @requires_internet def test_lazy_git(): lazy_git = LazyGit(git_url=DOCKERFILE_GIT) with lazy_git: assert lazy_git.git_path is not None assert lazy_git.commit_id is not None assert len(lazy_git.commit_id) == 40 # current git hashes are this long @requires_internet def test_lazy_git_with_tmpdir(tmpdir): t = str(tmpdir.realpath()) lazy_git = LazyGit(git_url=DOCKERFILE_GIT, tmpdir=t) assert lazy_git._tmpdir == t assert lazy_git.git_path is not None assert lazy_git.commit_id is not None assert len(lazy_git.commit_id) == 40 # current git hashes are this long def test_render_yum_repo_unicode(): yum_repo = OrderedDict(( ("name", "asd"), ("baseurl", "http://example.com/$basearch/test.repo"), ("enabled", "1"), ("gpgcheck", "0"), )) rendered_repo = render_yum_repo(yum_repo) assert rendered_repo == """\ [asd] name=asd 
baseurl=http://example.com/\$basearch/test.repo enabled=1 gpgcheck=0 """ @pytest.mark.parametrize('dct, subst, expected', [ ({'foo': 'bar'}, ['foo=spam'], {'foo': 'spam'}), ({'foo': 'bar'}, ['baz=spam'], {'foo': 'bar', 'baz': 'spam'}), ({'foo': 'bar'}, ['foo.bar=spam'], {'foo': {'bar': 'spam'}}), ({'foo': 'bar'}, ['spam.spam=spam'], {'foo': 'bar', 'spam': {'spam': 'spam'}}), ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.a.b': 'd'}, {'x_plugins': [{'name': 'a', 'args': {'b': 'd'}}]}), # substituting plugins doesn't add new params ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.a.c': 'd'}, {'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}), ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.X': 'd'}, ValueError()), ]) def test_process_substitutions(dct, subst, expected): if isinstance(expected, Exception): with pytest.raises(type(expected)): process_substitutions(dct, subst) else: process_substitutions(dct, subst) assert dct == expected @pytest.mark.parametrize('content, algorithms, expected', [ (b'abc', ['md5', 'sha256'], {'md5sum': '900150983cd24fb0d6963f7d28e17f72', 'sha256sum': 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'}), (b'abc', ['md5'], {'md5sum': '900150983cd24fb0d6963f7d28e17f72'}), (b'abc', [], {}) ]) def test_get_hexdigests(tmpdir, content, algorithms, expected): with tempfile.NamedTemporaryFile(dir=str(tmpdir)) as tmpfile: tmpfile.write(content) tmpfile.flush() checksums = get_checksums(tmpfile.name, algorithms) assert checksums == expected @pytest.mark.parametrize('path, image_type, expected', [ ('foo.tar', IMAGE_TYPE_DOCKER_ARCHIVE, 'docker-image-XXX.x86_64.tar'), ('foo.tar.gz', IMAGE_TYPE_DOCKER_ARCHIVE, 'docker-image-XXX.x86_64.tar.gz'), ('foo.tar.gz', IMAGE_TYPE_OCI_TAR, 'oci-image-XXX.x86_64.tar.gz'), ('foo', IMAGE_TYPE_OCI, None), ]) def test_get_image_upload_filename(path, image_type, expected): metadata = { 'path': path, 'type': image_type, } if expected is None: with 
pytest.raises(ValueError): get_image_upload_filename(metadata, 'XXX', 'x86_64') else: assert get_image_upload_filename(metadata, 'XXX', 'x86_64') == expected def test_get_versions_of_tools(): response = get_version_of_tools() assert isinstance(response, list) for t in response: assert t["name"] assert t["version"] def test_print_versions_of_tools(): print_version_of_tools() @pytest.mark.parametrize('labels, name, expected', [ ({'name': 'foo', 'Name': 'foo'}, 'name', 'name'), ({'name': 'foo', 'Name': 'foo'}, 'Name', 'name'), ({'name': 'foo'}, 'Name', 'name'), ({'Name': 'foo'}, 'name', 'Name'), ({}, 'Name', 'name'), ({}, 'foobar', 'foobar') ]) def test_preferred_labels(labels, name, expected): result = get_preferred_label_key(labels, name) assert result == expected @pytest.mark.parametrize('size_input,expected', [ (0, "0.00 B"), (1, "1.00 B"), (-1, "-1.00 B"), (1536, "1.50 KiB"), (-1024, "-1.00 KiB"), (204800, "200.00 KiB"), (6983516, "6.66 MiB"), (14355928186, "13.37 GiB"), (135734710448947, "123.45 TiB"), (1180579814801204129310965, "999.99 ZiB"), (1074589982539051580812825722, "888.88 YiB"), (4223769947617154742438477168, "3493.82 YiB"), (-4223769947617154742438477168, "-3493.82 YiB"), ]) def test_human_size(size_input, expected): assert human_size(size_input) == expected @pytest.mark.parametrize(('version', 'expected'), [ ('v1', 'application/vnd.docker.distribution.manifest.v1+json'), ('v2', 'application/vnd.docker.distribution.manifest.v2+json'), ('v2_list', 'application/vnd.docker.distribution.manifest.list.v2+json'), ]) def test_get_manifest_media_type(version, expected): assert get_manifest_media_type(version) == expected @pytest.mark.parametrize('insecure', [ True, False, ]) @pytest.mark.parametrize('versions,require_digest', [ (('v1', 'v2', 'v2_list'), True), (('v1', 'v2', 'v2_list'), False), (('v1',), False), (('v1',), True), (('v2',), False), (('v2',), True), (tuple(), False), (tuple(), True), (None, False), (None, True), (('v2_list',), True), 
(('v2_list',), False), ]) @pytest.mark.parametrize('creds', [ ('user1', 'pass'), (None, 'pass'), ('user1', None), None, ]) @pytest.mark.parametrize('image,registry,path', [ ('not-used.com/spam:latest', 'localhost.com', '/v2/spam/manifests/latest'), ('not-used.com/food/spam:latest', 'http://localhost.com', '/v2/food/spam/manifests/latest'), ('not-used.com/spam', 'https://localhost.com', '/v2/spam/manifests/latest'), ]) @responses.activate def test_get_manifest_digests(tmpdir, image, registry, insecure, creds, versions, require_digest, path): kwargs = {} image = ImageName.parse(image) kwargs['image'] = image if creds: temp_dir = mkdtemp(dir=str(tmpdir)) with open(os.path.join(temp_dir, '.dockercfg'), 'w+') as dockerconfig: dockerconfig.write(json.dumps({ image.registry: { 'username': creds[0], 'password': creds[1] } })) kwargs['dockercfg_path'] = temp_dir kwargs['registry'] = registry if insecure is not None: kwargs['insecure'] = insecure if versions is not None: kwargs['versions'] = versions kwargs['require_digest'] = require_digest def request_callback(request, all_headers=True): if creds and creds[0] and creds[1]: assert request.headers['Authorization'] media_type = request.headers['Accept'] if media_type.endswith('list.v2+json'): digest = 'v2_list-digest' elif media_type.endswith('v2+json'): digest = 'v2-digest' elif media_type.endswith('v1+json'): digest = 'v1-digest' else: raise ValueError('Unexpected media type {}'.format(media_type)) media_type_prefix = media_type.split('+')[0] if all_headers: headers = { 'Content-Type': '{}+jsonish'.format(media_type_prefix), } if not media_type.endswith('list.v2+json'): headers['Docker-Content-Digest'] = digest else: headers = {} return (200, headers, '') if registry.startswith('http'): url = registry + path else: # In the insecure case, we should try the https URL, and when that produces # an error, fall back to http if insecure: https_url = 'https://' + registry + path responses.add(responses.GET, https_url, 
body=ConnectionError()) url = 'http://' + registry + path else: url = 'https://' + registry + path responses.add_callback(responses.GET, url, callback=request_callback) expected_versions = versions if versions is None: # Test default versions value expected_versions = ('v1', 'v2') expected_result = dict( (version, '{}-digest'.format(version)) for version in expected_versions) if versions and 'v2_list' in versions: expected_result['v2_list'] = True if expected_versions: actual_digests = get_manifest_digests(**kwargs) assert actual_digests.v1 == expected_result.get('v1') assert actual_digests.v2 == expected_result.get('v2') if 'v2_list' in expected_result: assert actual_digests.v2_list == expected_result.get('v2_list') elif require_digest: with pytest.raises(RuntimeError): get_manifest_digests(**kwargs) else: get_manifest_digests(**kwargs) @pytest.mark.parametrize('has_content_type_header', [ True, False ]) @pytest.mark.parametrize('has_content_digest', [ True, False ]) @pytest.mark.parametrize('manifest_type,can_convert_v2_v1', [ ('v1', False), ('v2', True), ('v2', False), ('oci', False) ]) def test_get_manifest_digests_missing(tmpdir, has_content_type_header, has_content_digest, manifest_type, can_convert_v2_v1): kwargs = {} image = ImageName.parse('example.com/spam:latest') kwargs['image'] = image kwargs['registry'] = 'https://example.com' expected_url = 'https://example.com/v2/spam/manifests/latest' mock_get_retry_session() def custom_get(url, headers, **kwargs): assert url == expected_url media_type = headers['Accept'] media_type_prefix = media_type.split('+')[0] assert media_type.endswith('+json') # Attempt to simulate how a docker registry behaves: # * If the stored digest is v1, return it # * If the stored digest is v2, and v2 is requested, return it # * If the stored digest is v2, and v1 is requested, try # to convert and return v1 or an error. 
if manifest_type == 'v1': digest = 'v1-digest' media_type_prefix = 'application/vnd.docker.distribution.manifest.v1' elif manifest_type == 'v2': if media_type_prefix == 'application/vnd.docker.distribution.manifest.v2': digest = 'v2-digest' else: if not can_convert_v2_v1: response_json = {"errors": [{"code": "MANIFEST_INVALID"}]} response = requests.Response() flexmock(response, status_code=400, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response digest = 'v1-converted-digest' media_type_prefix = 'application/vnd.docker.distribution.manifest.v1' elif manifest_type == 'oci': if media_type_prefix == 'application/vnd.oci.image.manifest.v1': digest = 'oci-digest' else: headers = {} response_json = {"errors": [{"code": "MANIFEST_UNKNOWN"}]} response = requests.Response() flexmock(response, status_code=requests.codes.not_found, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response headers = {} if has_content_type_header: headers['Content-Type'] = '{}+jsonish'.format(media_type_prefix) if has_content_digest: headers['Docker-Content-Digest'] = digest if media_type_prefix == 'application/vnd.docker.distribution.manifest.v1': response_json = {'schemaVersion': 1} else: response_json = {'schemaVersion': 2, 'mediaType': media_type_prefix + '+json'} response = requests.Response() flexmock(response, status_code=200, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response (flexmock(requests.Session) .should_receive('get') .replace_with(custom_get)) if manifest_type == 'v1' and not has_content_type_header: # v1 manifests don't have a mediaType field, so we can't fall back # to looking at the returned manifest to detect the type. 
with pytest.raises(RuntimeError): get_manifest_digests(**kwargs) return else: actual_digests = get_manifest_digests(**kwargs) if manifest_type == 'v1': if has_content_digest: assert actual_digests.v1 == 'v1-digest' else: assert actual_digests.v1 is True assert actual_digests.v2 is None assert actual_digests.oci is None elif manifest_type == 'v2': if can_convert_v2_v1: if has_content_type_header: if has_content_digest: assert actual_digests.v1 == 'v1-converted-digest' else: assert actual_digests.v1 is True else: # don't even know the response is v1 without Content-Type assert actual_digests.v1 is None else: assert actual_digests.v1 is None if has_content_digest: assert actual_digests.v2 == 'v2-digest' else: assert actual_digests.v2 is True assert actual_digests.oci is None elif manifest_type == 'oci': assert actual_digests.v1 is None assert actual_digests.v2 is None if has_content_digest: assert actual_digests.oci == 'oci-digest' else: assert actual_digests.oci is True @responses.activate def test_get_manifest_digests_connection_error(tmpdir): # Test that our code to handle falling back from https to http # doesn't do anything unexpected when a connection can't be # made at all. 
kwargs = {} kwargs['image'] = ImageName.parse('example.com/spam:latest') kwargs['registry'] = 'https://example.com' url = 'https://example.com/v2/spam/manifests/latest' responses.add(responses.GET, url, body=ConnectionError()) with pytest.raises(ConnectionError): get_manifest_digests(**kwargs) @pytest.mark.parametrize('v1,v2,oci,default', [ ('v1-digest', 'v2-digest', None, 'v2-digest'), ('v1-digest', None, None, 'v1-digest'), (None, 'v2-digest', None, 'v2-digest'), (None, 'v2-digest', None, 'v2-digest'), (None, None, 'oci-digest', 'oci-digest'), (None, 'v2-digest', 'oci-digest', 'oci-digest'), (None, None, None, None), ]) def test_manifest_digest(v1, v2, oci, default): md = ManifestDigest(v1=v1, v2=v2, oci=oci) assert md.v1 == v1 assert md.v2 == v2 assert md.oci == oci assert md.default == default @pytest.mark.parametrize('environ,expected', [ ({'BUILD': '{"foo": "bar"}'}, {'foo': 'bar'}), ({}, False), ]) def test_get_build_json(environ, expected): flexmock(os, environ=environ) if expected: assert get_build_json() == {'foo': 'bar'} else: with pytest.raises(KeyError): get_build_json() @pytest.mark.parametrize('build_json,scratch', [ ({'metadata': {'labels': {'scratch': True}}}, True), ({'metadata': {'labels': {'scratch': False}}}, False), ({'metadata': {'labels': {}}}, False), ({'metadata': {}}, None), ({}, None), ]) def test_is_scratch_build(build_json, scratch): flexmock(util).should_receive('get_build_json').and_return(build_json) if scratch is None: with pytest.raises(KeyError): is_scratch_build() else: assert is_scratch_build() == scratch def test_df_parser(tmpdir): tmpdir_path = str(tmpdir.realpath()) df = df_parser(tmpdir_path) df.lines = [ "FROM fedora\n", "ENV foo=\"bar\"\n", "LABEL label=\"foobar barfoo\"\n" ] assert len(df.envs) == 1 assert df.envs.get('foo') == 'bar' assert len(df.labels) == 1 assert df.labels.get('label') == 'foobar barfoo' def test_df_parser_parent_env_arg(tmpdir): p_env = { "test_env": "first" } df_content = dedent("""\ FROM fedora 
ENV foo=bar LABEL label="foobar $test_env" """) df = df_parser(str(tmpdir), parent_env=p_env) df.content = df_content assert df.labels.get('label') == 'foobar first' @pytest.mark.parametrize('env_arg', [ {"test_env": "first"}, ['test_env=first'], ['test_env='], ['test_env=--option=first --option=second'], ['test_env_first'], ]) def test_df_parser_parent_env_wf(tmpdir, caplog, env_arg): df_content = dedent("""\ FROM fedora ENV foo=bar LABEL label="foobar $test_env" """) env_conf = {INSPECT_CONFIG: {"Env": env_arg}} workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image') flexmock(workflow, base_image_inspect=env_conf) df = df_parser(str(tmpdir), workflow=workflow) df.content = df_content if isinstance(env_arg, list) and ('=' not in env_arg[0]): expected_log_message = "Unable to parse all of Parent Config ENV" assert expected_log_message in [l.getMessage() for l in caplog.records()] elif isinstance(env_arg, dict): assert df.labels.get('label') == ('foobar ' + env_arg['test_env']) else: assert df.labels.get('label') == 'foobar ' + env_arg[0].split('=', 1)[1] @pytest.mark.parametrize(('available', 'requested', 'result'), ( (['spam', 'bacon', 'eggs'], ['spam'], True), (['spam', 'bacon', 'eggs'], ['spam', 'bacon'], True), (['spam', 'bacon', 'eggs'], ['spam', 'bacon', 'eggs'], True), (['spam', 'bacon', 'eggs'], ['spam', 'eggs'], True), (['spam', 'bacon', 'eggs'], ['eggs', 'spam'], False), (['spam', 'bacon', 'eggs'], ['spam', 'eggs', 'bacon'], False), (['spam', 'bacon', 'eggs'], ['sausage'], False), )) def test_are_plugins_in_order(available, requested, result): assert are_plugins_in_order([{'name': plugin} for plugin in available], *requested) == result @pytest.mark.parametrize(('test_string', 'labels', 'expected'), [ ('', {}, ''), ('', {'version': 'cat'}, ''), ('dog', {'version': 'cat'}, 'dog'), ('dog', {}, 'dog'), ('{version}', {'version': 'cat'}, 'cat'), ('dog-{version}', {'version': 'cat'}, 'dog-cat'), ('{version}', {}, None), ('{Version}', {'version': 'cat'}, None), 
]) def test_label_formatter(labels, test_string, expected): if expected is not None: assert expected == LabelFormatter().vformat(test_string, [], labels) else: with pytest.raises(KeyError): LabelFormatter().vformat(test_string, [], labels) @pytest.mark.parametrize(('tag_conf', 'tag_annotation', 'expected'), ( (['spam', 'bacon'], [], ['spam', 'bacon']), ([], ['spam', 'bacon'], ['spam', 'bacon']), (['spam', 'bacon'], ['ignored', 'scorned'], ['spam', 'bacon']), )) def test_get_primary_images(tag_conf, tag_annotation, expected): template_image = ImageName.parse('registry.example.com/fedora') workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image') for tag in tag_conf: image_name = ImageName.parse(str(template_image)) image_name.tag = tag workflow.tag_conf.add_primary_image(str(image_name)) annotations = {} for tag in tag_annotation: annotations.setdefault('repositories', {}).setdefault('primary', []) image_name = ImageName.parse(str(template_image)) image_name.tag = tag annotations['repositories']['primary'].append(str(image_name)) build_result = BuildResult(annotations=annotations, image_id='foo') workflow.build_result = build_result actual = get_primary_images(workflow) assert len(actual) == len(expected) for index, primary_image in enumerate(actual): assert primary_image.registry == template_image.registry assert primary_image.namespace == template_image.namespace assert primary_image.repo == template_image.repo assert primary_image.tag == expected[index]
maxamillion/atomic-reactor
tests/test_util.py
atomic_reactor/pulp_util.py
""" Copyright (c) 2017 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import print_function, unicode_literals from atomic_reactor.constants import INSPECT_CONFIG from atomic_reactor.koji_util import create_koji_session from atomic_reactor.plugin import PreBuildPlugin from atomic_reactor.constants import PLUGIN_KOJI_PARENT_KEY import time DEFAULT_POLL_TIMEOUT = 60 * 10 # 10 minutes DEFAULT_POLL_INTERVAL = 10 # 10 seconds class KojiParentPlugin(PreBuildPlugin): """Wait for Koji build of parent image to be avaialable Uses inspected parent image config to determine the nvr (Name-Version-Release) of the parent image. It uses this information to check if the corresponding Koji build exists. This check is performed periodically until the Koji build is found, or timeout expires. This check is required due to a timing issue that may occur after the image is pushed to registry, but it has not been yet uploaded and tagged in Koji. This plugin ensures that the layered image is only built with a parent image that is known in Koji. 
""" key = PLUGIN_KOJI_PARENT_KEY is_allowed_to_fail = False def __init__(self, tasker, workflow, koji_hub, koji_ssl_certs_dir=None, poll_interval=DEFAULT_POLL_INTERVAL, poll_timeout=DEFAULT_POLL_TIMEOUT): """ :param tasker: DockerTasker instance :param workflow: DockerBuildWorkflow instance :param koji_hub: str, koji hub (xmlrpc) :param koji_ssl_certs_dir: str, path to "cert", "ca", and "serverca" used when Koji's identity certificate is not trusted :param poll_interval: int, seconds between polling for Koji build :param poll_timeout: int, max amount of seconds to wait for Koji build """ super(KojiParentPlugin, self).__init__(tasker, workflow) koji_auth_info = None if koji_ssl_certs_dir: koji_auth_info = { 'ssl_certs_dir': koji_ssl_certs_dir, } self.koji_session = create_koji_session(koji_hub, koji_auth_info) self.poll_interval = poll_interval self.poll_timeout = poll_timeout self._parent_image_nvr = None self._parent_image_build = None self._poll_start = None def run(self): if not self.detect_parent_image_nvr(): return self.wait_for_parent_image_build() self.verify_parent_image_build() return self.make_result() def detect_parent_image_nvr(self): config = self.workflow.base_image_inspect[INSPECT_CONFIG] labels = config['Labels'] or {} label_names = 'com.redhat.component', 'version', 'release' for label_name in label_names: if label_name not in labels: self._parent_image_nvr = None self.log.info("Failed to find label '%s' in parent image. 
" "Not waiting for Koji build.", label_name) return False self._parent_image_nvr = '-'.join( labels[label_name] for label_name in label_names) return True def wait_for_parent_image_build(self): self.start_polling_timer() self.log.info('Waiting for parent image Koji build %s', self._parent_image_nvr) while self.is_within_timeout(): if self.has_parent_image_build(): self.log.info('Parent image Koji build found') break time.sleep(self.poll_interval) def start_polling_timer(self): self._poll_start = time.time() def is_within_timeout(self): return (time.time() - self._poll_start) < self.poll_timeout def has_parent_image_build(self): self._parent_image_build = self.koji_session.getBuild(self._parent_image_nvr) return self._parent_image_build is not None def verify_parent_image_build(self): if self._parent_image_build is None: raise ValueError('Parent image Koji build NOT found!') def make_result(self): return {'parent-image-koji-build-id': self._parent_image_build['id']}
""" Copyright (c) 2015 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import unicode_literals import json import os import tempfile import pytest import requests import responses from requests.exceptions import ConnectionError import six from tempfile import mkdtemp from textwrap import dedent from flexmock import flexmock from collections import OrderedDict import docker from atomic_reactor.build import BuildResult from atomic_reactor.constants import (IMAGE_TYPE_DOCKER_ARCHIVE, IMAGE_TYPE_OCI, IMAGE_TYPE_OCI_TAR) from atomic_reactor.inner import DockerBuildWorkflow from atomic_reactor.util import (ImageName, wait_for_command, clone_git_repo, LazyGit, figure_out_build_file, render_yum_repo, process_substitutions, get_checksums, print_version_of_tools, get_version_of_tools, get_preferred_label_key, human_size, CommandResult, get_manifest_digests, ManifestDigest, get_build_json, is_scratch_build, df_parser, are_plugins_in_order, LabelFormatter, get_manifest_media_type, get_retrying_requests_session, get_primary_images, get_image_upload_filename) from atomic_reactor import util from tests.constants import (DOCKERFILE_GIT, FLATPAK_GIT, INPUT_IMAGE, MOCK, DOCKERFILE_SHA1, MOCK_SOURCE) from atomic_reactor.constants import INSPECT_CONFIG from tests.util import requires_internet if MOCK: from tests.docker_mock import mock_docker from tests.retry_mock import mock_get_retry_session TEST_DATA = { "repository.com/image-name": ImageName(registry="repository.com", repo="image-name"), "repository.com/prefix/image-name:1": ImageName(registry="repository.com", namespace="prefix", repo="image-name", tag="1"), "repository.com/prefix/image-name@sha256:12345": ImageName(registry="repository.com", namespace="prefix", repo="image-name", tag="sha256:12345"), "repository.com/prefix/image-name": ImageName(registry="repository.com", namespace="prefix", 
repo="image-name"), "image-name": ImageName(repo="image-name"), "registry:5000/image-name:latest": ImageName(registry="registry:5000", repo="image-name", tag="latest"), "registry:5000/image-name@sha256:12345": ImageName(registry="registry:5000", repo="image-name", tag="sha256:12345"), "registry:5000/image-name": ImageName(registry="registry:5000", repo="image-name"), "fedora:20": ImageName(repo="fedora", tag="20"), "fedora@sha256:12345": ImageName(repo="fedora", tag="sha256:12345"), "prefix/image-name:1": ImageName(namespace="prefix", repo="image-name", tag="1"), "prefix/image-name@sha256:12345": ImageName(namespace="prefix", repo="image-name", tag="sha256:12345"), "library/fedora:20": ImageName(namespace="library", repo="fedora", tag="20"), "library/fedora@sha256:12345": ImageName(namespace="library", repo="fedora", tag="sha256:12345"), } def test_image_name_parse(): for inp, parsed in TEST_DATA.items(): assert ImageName.parse(inp) == parsed def test_image_name_format(): for expected, image_name in TEST_DATA.items(): assert image_name.to_str() == expected def test_image_name_comparison(): # make sure that both "==" and "!=" are implemented right on both Python major releases i1 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='1') i2 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='1') assert i1 == i2 assert not i1 != i2 i2 = ImageName(registry='foo.com', namespace='spam', repo='bar', tag='2') assert not i1 == i2 assert i1 != i2 def test_wait_for_command(): if MOCK: mock_docker() d = docker.APIClient() logs_gen = d.pull(INPUT_IMAGE, decode=True, stream=True) assert wait_for_command(logs_gen) is not None @requires_internet def test_clone_git_repo(tmpdir): tmpdir_path = str(tmpdir.realpath()) commit_id = clone_git_repo(DOCKERFILE_GIT, tmpdir_path) assert commit_id is not None assert len(commit_id) == 40 # current git hashes are this long assert os.path.isdir(os.path.join(tmpdir_path, '.git')) class TestCommandResult(object): 
@pytest.mark.parametrize(('item', 'expected'), [ ({"stream": "Step 0 : FROM ebbc51b7dfa5bcd993a[...]"}, "Step 0 : FROM ebbc51b7dfa5bcd993a[...]"), ('this is not valid JSON', 'this is not valid JSON'), ]) def test_parse_item(self, item, expected): cr = CommandResult() cr.parse_item(item) assert cr.logs == [expected] @requires_internet def test_clone_git_repo_by_sha1(tmpdir): tmpdir_path = str(tmpdir.realpath()) commit_id = clone_git_repo(DOCKERFILE_GIT, tmpdir_path, commit=DOCKERFILE_SHA1) assert commit_id is not None assert six.text_type(commit_id, encoding="ascii") == six.text_type(DOCKERFILE_SHA1) assert len(commit_id) == 40 # current git hashes are this long assert os.path.isdir(os.path.join(tmpdir_path, '.git')) @requires_internet @pytest.mark.parametrize('repository,expected_path', [ (DOCKERFILE_GIT, "Dockerfile"), (FLATPAK_GIT, "flatpak.json"), ]) def test_figure_out_build_file(tmpdir, repository, expected_path): tmpdir_path = str(tmpdir.realpath()) clone_git_repo(repository, tmpdir_path) path, dir = figure_out_build_file(tmpdir_path) assert path == os.path.join(tmpdir_path, expected_path) assert os.path.isfile(path) assert os.path.isdir(dir) @requires_internet def test_lazy_git(): lazy_git = LazyGit(git_url=DOCKERFILE_GIT) with lazy_git: assert lazy_git.git_path is not None assert lazy_git.commit_id is not None assert len(lazy_git.commit_id) == 40 # current git hashes are this long @requires_internet def test_lazy_git_with_tmpdir(tmpdir): t = str(tmpdir.realpath()) lazy_git = LazyGit(git_url=DOCKERFILE_GIT, tmpdir=t) assert lazy_git._tmpdir == t assert lazy_git.git_path is not None assert lazy_git.commit_id is not None assert len(lazy_git.commit_id) == 40 # current git hashes are this long def test_render_yum_repo_unicode(): yum_repo = OrderedDict(( ("name", "asd"), ("baseurl", "http://example.com/$basearch/test.repo"), ("enabled", "1"), ("gpgcheck", "0"), )) rendered_repo = render_yum_repo(yum_repo) assert rendered_repo == """\ [asd] name=asd 
baseurl=http://example.com/\$basearch/test.repo enabled=1 gpgcheck=0 """ @pytest.mark.parametrize('dct, subst, expected', [ ({'foo': 'bar'}, ['foo=spam'], {'foo': 'spam'}), ({'foo': 'bar'}, ['baz=spam'], {'foo': 'bar', 'baz': 'spam'}), ({'foo': 'bar'}, ['foo.bar=spam'], {'foo': {'bar': 'spam'}}), ({'foo': 'bar'}, ['spam.spam=spam'], {'foo': 'bar', 'spam': {'spam': 'spam'}}), ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.a.b': 'd'}, {'x_plugins': [{'name': 'a', 'args': {'b': 'd'}}]}), # substituting plugins doesn't add new params ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.a.c': 'd'}, {'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}), ({'x_plugins': [{'name': 'a', 'args': {'b': 'c'}}]}, {'x_plugins.X': 'd'}, ValueError()), ]) def test_process_substitutions(dct, subst, expected): if isinstance(expected, Exception): with pytest.raises(type(expected)): process_substitutions(dct, subst) else: process_substitutions(dct, subst) assert dct == expected @pytest.mark.parametrize('content, algorithms, expected', [ (b'abc', ['md5', 'sha256'], {'md5sum': '900150983cd24fb0d6963f7d28e17f72', 'sha256sum': 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'}), (b'abc', ['md5'], {'md5sum': '900150983cd24fb0d6963f7d28e17f72'}), (b'abc', [], {}) ]) def test_get_hexdigests(tmpdir, content, algorithms, expected): with tempfile.NamedTemporaryFile(dir=str(tmpdir)) as tmpfile: tmpfile.write(content) tmpfile.flush() checksums = get_checksums(tmpfile.name, algorithms) assert checksums == expected @pytest.mark.parametrize('path, image_type, expected', [ ('foo.tar', IMAGE_TYPE_DOCKER_ARCHIVE, 'docker-image-XXX.x86_64.tar'), ('foo.tar.gz', IMAGE_TYPE_DOCKER_ARCHIVE, 'docker-image-XXX.x86_64.tar.gz'), ('foo.tar.gz', IMAGE_TYPE_OCI_TAR, 'oci-image-XXX.x86_64.tar.gz'), ('foo', IMAGE_TYPE_OCI, None), ]) def test_get_image_upload_filename(path, image_type, expected): metadata = { 'path': path, 'type': image_type, } if expected is None: with 
pytest.raises(ValueError): get_image_upload_filename(metadata, 'XXX', 'x86_64') else: assert get_image_upload_filename(metadata, 'XXX', 'x86_64') == expected def test_get_versions_of_tools(): response = get_version_of_tools() assert isinstance(response, list) for t in response: assert t["name"] assert t["version"] def test_print_versions_of_tools(): print_version_of_tools() @pytest.mark.parametrize('labels, name, expected', [ ({'name': 'foo', 'Name': 'foo'}, 'name', 'name'), ({'name': 'foo', 'Name': 'foo'}, 'Name', 'name'), ({'name': 'foo'}, 'Name', 'name'), ({'Name': 'foo'}, 'name', 'Name'), ({}, 'Name', 'name'), ({}, 'foobar', 'foobar') ]) def test_preferred_labels(labels, name, expected): result = get_preferred_label_key(labels, name) assert result == expected @pytest.mark.parametrize('size_input,expected', [ (0, "0.00 B"), (1, "1.00 B"), (-1, "-1.00 B"), (1536, "1.50 KiB"), (-1024, "-1.00 KiB"), (204800, "200.00 KiB"), (6983516, "6.66 MiB"), (14355928186, "13.37 GiB"), (135734710448947, "123.45 TiB"), (1180579814801204129310965, "999.99 ZiB"), (1074589982539051580812825722, "888.88 YiB"), (4223769947617154742438477168, "3493.82 YiB"), (-4223769947617154742438477168, "-3493.82 YiB"), ]) def test_human_size(size_input, expected): assert human_size(size_input) == expected @pytest.mark.parametrize(('version', 'expected'), [ ('v1', 'application/vnd.docker.distribution.manifest.v1+json'), ('v2', 'application/vnd.docker.distribution.manifest.v2+json'), ('v2_list', 'application/vnd.docker.distribution.manifest.list.v2+json'), ]) def test_get_manifest_media_type(version, expected): assert get_manifest_media_type(version) == expected @pytest.mark.parametrize('insecure', [ True, False, ]) @pytest.mark.parametrize('versions,require_digest', [ (('v1', 'v2', 'v2_list'), True), (('v1', 'v2', 'v2_list'), False), (('v1',), False), (('v1',), True), (('v2',), False), (('v2',), True), (tuple(), False), (tuple(), True), (None, False), (None, True), (('v2_list',), True), 
(('v2_list',), False), ]) @pytest.mark.parametrize('creds', [ ('user1', 'pass'), (None, 'pass'), ('user1', None), None, ]) @pytest.mark.parametrize('image,registry,path', [ ('not-used.com/spam:latest', 'localhost.com', '/v2/spam/manifests/latest'), ('not-used.com/food/spam:latest', 'http://localhost.com', '/v2/food/spam/manifests/latest'), ('not-used.com/spam', 'https://localhost.com', '/v2/spam/manifests/latest'), ]) @responses.activate def test_get_manifest_digests(tmpdir, image, registry, insecure, creds, versions, require_digest, path): kwargs = {} image = ImageName.parse(image) kwargs['image'] = image if creds: temp_dir = mkdtemp(dir=str(tmpdir)) with open(os.path.join(temp_dir, '.dockercfg'), 'w+') as dockerconfig: dockerconfig.write(json.dumps({ image.registry: { 'username': creds[0], 'password': creds[1] } })) kwargs['dockercfg_path'] = temp_dir kwargs['registry'] = registry if insecure is not None: kwargs['insecure'] = insecure if versions is not None: kwargs['versions'] = versions kwargs['require_digest'] = require_digest def request_callback(request, all_headers=True): if creds and creds[0] and creds[1]: assert request.headers['Authorization'] media_type = request.headers['Accept'] if media_type.endswith('list.v2+json'): digest = 'v2_list-digest' elif media_type.endswith('v2+json'): digest = 'v2-digest' elif media_type.endswith('v1+json'): digest = 'v1-digest' else: raise ValueError('Unexpected media type {}'.format(media_type)) media_type_prefix = media_type.split('+')[0] if all_headers: headers = { 'Content-Type': '{}+jsonish'.format(media_type_prefix), } if not media_type.endswith('list.v2+json'): headers['Docker-Content-Digest'] = digest else: headers = {} return (200, headers, '') if registry.startswith('http'): url = registry + path else: # In the insecure case, we should try the https URL, and when that produces # an error, fall back to http if insecure: https_url = 'https://' + registry + path responses.add(responses.GET, https_url, 
body=ConnectionError()) url = 'http://' + registry + path else: url = 'https://' + registry + path responses.add_callback(responses.GET, url, callback=request_callback) expected_versions = versions if versions is None: # Test default versions value expected_versions = ('v1', 'v2') expected_result = dict( (version, '{}-digest'.format(version)) for version in expected_versions) if versions and 'v2_list' in versions: expected_result['v2_list'] = True if expected_versions: actual_digests = get_manifest_digests(**kwargs) assert actual_digests.v1 == expected_result.get('v1') assert actual_digests.v2 == expected_result.get('v2') if 'v2_list' in expected_result: assert actual_digests.v2_list == expected_result.get('v2_list') elif require_digest: with pytest.raises(RuntimeError): get_manifest_digests(**kwargs) else: get_manifest_digests(**kwargs) @pytest.mark.parametrize('has_content_type_header', [ True, False ]) @pytest.mark.parametrize('has_content_digest', [ True, False ]) @pytest.mark.parametrize('manifest_type,can_convert_v2_v1', [ ('v1', False), ('v2', True), ('v2', False), ('oci', False) ]) def test_get_manifest_digests_missing(tmpdir, has_content_type_header, has_content_digest, manifest_type, can_convert_v2_v1): kwargs = {} image = ImageName.parse('example.com/spam:latest') kwargs['image'] = image kwargs['registry'] = 'https://example.com' expected_url = 'https://example.com/v2/spam/manifests/latest' mock_get_retry_session() def custom_get(url, headers, **kwargs): assert url == expected_url media_type = headers['Accept'] media_type_prefix = media_type.split('+')[0] assert media_type.endswith('+json') # Attempt to simulate how a docker registry behaves: # * If the stored digest is v1, return it # * If the stored digest is v2, and v2 is requested, return it # * If the stored digest is v2, and v1 is requested, try # to convert and return v1 or an error. 
if manifest_type == 'v1': digest = 'v1-digest' media_type_prefix = 'application/vnd.docker.distribution.manifest.v1' elif manifest_type == 'v2': if media_type_prefix == 'application/vnd.docker.distribution.manifest.v2': digest = 'v2-digest' else: if not can_convert_v2_v1: response_json = {"errors": [{"code": "MANIFEST_INVALID"}]} response = requests.Response() flexmock(response, status_code=400, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response digest = 'v1-converted-digest' media_type_prefix = 'application/vnd.docker.distribution.manifest.v1' elif manifest_type == 'oci': if media_type_prefix == 'application/vnd.oci.image.manifest.v1': digest = 'oci-digest' else: headers = {} response_json = {"errors": [{"code": "MANIFEST_UNKNOWN"}]} response = requests.Response() flexmock(response, status_code=requests.codes.not_found, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response headers = {} if has_content_type_header: headers['Content-Type'] = '{}+jsonish'.format(media_type_prefix) if has_content_digest: headers['Docker-Content-Digest'] = digest if media_type_prefix == 'application/vnd.docker.distribution.manifest.v1': response_json = {'schemaVersion': 1} else: response_json = {'schemaVersion': 2, 'mediaType': media_type_prefix + '+json'} response = requests.Response() flexmock(response, status_code=200, content=json.dumps(response_json).encode("utf-8"), headers=headers) return response (flexmock(requests.Session) .should_receive('get') .replace_with(custom_get)) if manifest_type == 'v1' and not has_content_type_header: # v1 manifests don't have a mediaType field, so we can't fall back # to looking at the returned manifest to detect the type. 
with pytest.raises(RuntimeError): get_manifest_digests(**kwargs) return else: actual_digests = get_manifest_digests(**kwargs) if manifest_type == 'v1': if has_content_digest: assert actual_digests.v1 == 'v1-digest' else: assert actual_digests.v1 is True assert actual_digests.v2 is None assert actual_digests.oci is None elif manifest_type == 'v2': if can_convert_v2_v1: if has_content_type_header: if has_content_digest: assert actual_digests.v1 == 'v1-converted-digest' else: assert actual_digests.v1 is True else: # don't even know the response is v1 without Content-Type assert actual_digests.v1 is None else: assert actual_digests.v1 is None if has_content_digest: assert actual_digests.v2 == 'v2-digest' else: assert actual_digests.v2 is True assert actual_digests.oci is None elif manifest_type == 'oci': assert actual_digests.v1 is None assert actual_digests.v2 is None if has_content_digest: assert actual_digests.oci == 'oci-digest' else: assert actual_digests.oci is True @responses.activate def test_get_manifest_digests_connection_error(tmpdir): # Test that our code to handle falling back from https to http # doesn't do anything unexpected when a connection can't be # made at all. 
kwargs = {} kwargs['image'] = ImageName.parse('example.com/spam:latest') kwargs['registry'] = 'https://example.com' url = 'https://example.com/v2/spam/manifests/latest' responses.add(responses.GET, url, body=ConnectionError()) with pytest.raises(ConnectionError): get_manifest_digests(**kwargs) @pytest.mark.parametrize('v1,v2,oci,default', [ ('v1-digest', 'v2-digest', None, 'v2-digest'), ('v1-digest', None, None, 'v1-digest'), (None, 'v2-digest', None, 'v2-digest'), (None, 'v2-digest', None, 'v2-digest'), (None, None, 'oci-digest', 'oci-digest'), (None, 'v2-digest', 'oci-digest', 'oci-digest'), (None, None, None, None), ]) def test_manifest_digest(v1, v2, oci, default): md = ManifestDigest(v1=v1, v2=v2, oci=oci) assert md.v1 == v1 assert md.v2 == v2 assert md.oci == oci assert md.default == default @pytest.mark.parametrize('environ,expected', [ ({'BUILD': '{"foo": "bar"}'}, {'foo': 'bar'}), ({}, False), ]) def test_get_build_json(environ, expected): flexmock(os, environ=environ) if expected: assert get_build_json() == {'foo': 'bar'} else: with pytest.raises(KeyError): get_build_json() @pytest.mark.parametrize('build_json,scratch', [ ({'metadata': {'labels': {'scratch': True}}}, True), ({'metadata': {'labels': {'scratch': False}}}, False), ({'metadata': {'labels': {}}}, False), ({'metadata': {}}, None), ({}, None), ]) def test_is_scratch_build(build_json, scratch): flexmock(util).should_receive('get_build_json').and_return(build_json) if scratch is None: with pytest.raises(KeyError): is_scratch_build() else: assert is_scratch_build() == scratch def test_df_parser(tmpdir): tmpdir_path = str(tmpdir.realpath()) df = df_parser(tmpdir_path) df.lines = [ "FROM fedora\n", "ENV foo=\"bar\"\n", "LABEL label=\"foobar barfoo\"\n" ] assert len(df.envs) == 1 assert df.envs.get('foo') == 'bar' assert len(df.labels) == 1 assert df.labels.get('label') == 'foobar barfoo' def test_df_parser_parent_env_arg(tmpdir): p_env = { "test_env": "first" } df_content = dedent("""\ FROM fedora 
ENV foo=bar LABEL label="foobar $test_env" """) df = df_parser(str(tmpdir), parent_env=p_env) df.content = df_content assert df.labels.get('label') == 'foobar first' @pytest.mark.parametrize('env_arg', [ {"test_env": "first"}, ['test_env=first'], ['test_env='], ['test_env=--option=first --option=second'], ['test_env_first'], ]) def test_df_parser_parent_env_wf(tmpdir, caplog, env_arg): df_content = dedent("""\ FROM fedora ENV foo=bar LABEL label="foobar $test_env" """) env_conf = {INSPECT_CONFIG: {"Env": env_arg}} workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image') flexmock(workflow, base_image_inspect=env_conf) df = df_parser(str(tmpdir), workflow=workflow) df.content = df_content if isinstance(env_arg, list) and ('=' not in env_arg[0]): expected_log_message = "Unable to parse all of Parent Config ENV" assert expected_log_message in [l.getMessage() for l in caplog.records()] elif isinstance(env_arg, dict): assert df.labels.get('label') == ('foobar ' + env_arg['test_env']) else: assert df.labels.get('label') == 'foobar ' + env_arg[0].split('=', 1)[1] @pytest.mark.parametrize(('available', 'requested', 'result'), ( (['spam', 'bacon', 'eggs'], ['spam'], True), (['spam', 'bacon', 'eggs'], ['spam', 'bacon'], True), (['spam', 'bacon', 'eggs'], ['spam', 'bacon', 'eggs'], True), (['spam', 'bacon', 'eggs'], ['spam', 'eggs'], True), (['spam', 'bacon', 'eggs'], ['eggs', 'spam'], False), (['spam', 'bacon', 'eggs'], ['spam', 'eggs', 'bacon'], False), (['spam', 'bacon', 'eggs'], ['sausage'], False), )) def test_are_plugins_in_order(available, requested, result): assert are_plugins_in_order([{'name': plugin} for plugin in available], *requested) == result @pytest.mark.parametrize(('test_string', 'labels', 'expected'), [ ('', {}, ''), ('', {'version': 'cat'}, ''), ('dog', {'version': 'cat'}, 'dog'), ('dog', {}, 'dog'), ('{version}', {'version': 'cat'}, 'cat'), ('dog-{version}', {'version': 'cat'}, 'dog-cat'), ('{version}', {}, None), ('{Version}', {'version': 'cat'}, None), 
]) def test_label_formatter(labels, test_string, expected): if expected is not None: assert expected == LabelFormatter().vformat(test_string, [], labels) else: with pytest.raises(KeyError): LabelFormatter().vformat(test_string, [], labels) @pytest.mark.parametrize(('tag_conf', 'tag_annotation', 'expected'), ( (['spam', 'bacon'], [], ['spam', 'bacon']), ([], ['spam', 'bacon'], ['spam', 'bacon']), (['spam', 'bacon'], ['ignored', 'scorned'], ['spam', 'bacon']), )) def test_get_primary_images(tag_conf, tag_annotation, expected): template_image = ImageName.parse('registry.example.com/fedora') workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image') for tag in tag_conf: image_name = ImageName.parse(str(template_image)) image_name.tag = tag workflow.tag_conf.add_primary_image(str(image_name)) annotations = {} for tag in tag_annotation: annotations.setdefault('repositories', {}).setdefault('primary', []) image_name = ImageName.parse(str(template_image)) image_name.tag = tag annotations['repositories']['primary'].append(str(image_name)) build_result = BuildResult(annotations=annotations, image_id='foo') workflow.build_result = build_result actual = get_primary_images(workflow) assert len(actual) == len(expected) for index, primary_image in enumerate(actual): assert primary_image.registry == template_image.registry assert primary_image.namespace == template_image.namespace assert primary_image.repo == template_image.repo assert primary_image.tag == expected[index]
maxamillion/atomic-reactor
tests/test_util.py
atomic_reactor/plugins/pre_koji_parent.py
# Copyright 2013 dotCloud inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import six import requests.adapters import socket if six.PY3: import http.client as httplib else: import httplib try: import requests.packages.urllib3 as urllib3 except ImportError: import urllib3 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer class UnixHTTPConnection(httplib.HTTPConnection, object): def __init__(self, base_url, unix_socket, timeout=60): super(UnixHTTPConnection, self).__init__( 'localhost', timeout=timeout ) self.base_url = base_url self.unix_socket = unix_socket self.timeout = timeout def connect(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.settimeout(self.timeout) sock.connect(self.unix_socket) self.sock = sock class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, base_url, socket_path, timeout=60): super(UnixHTTPConnectionPool, self).__init__( 'localhost', timeout=timeout ) self.base_url = base_url self.socket_path = socket_path self.timeout = timeout def _new_conn(self): return UnixHTTPConnection(self.base_url, self.socket_path, self.timeout) class UnixAdapter(requests.adapters.HTTPAdapter): def __init__(self, socket_url, timeout=60): socket_path = socket_url.replace('http+unix://', '') if not socket_path.startswith('/'): socket_path = '/' + socket_path self.socket_path = socket_path self.timeout = timeout self.pools = RecentlyUsedContainer(10, dispose_func=lambda p: p.close()) super(UnixAdapter, 
self).__init__() def get_connection(self, url, proxies=None): with self.pools.lock: pool = self.pools.get(url) if pool: return pool pool = UnixHTTPConnectionPool( url, self.socket_path, self.timeout ) self.pools[url] = pool return pool def request_url(self, request, proxies): # The select_proxy utility in requests errors out when the provided URL # doesn't have a hostname, like is the case when using a UNIX socket. # Since proxies are an irrelevant notion in the case of UNIX sockets # anyway, we simply return the path URL directly. # See also: https://github.com/docker/docker-py/issues/811 return request.path_url def close(self): self.pools.clear()
# -*- coding: utf-8 -*- import datetime import json import signal import docker import pytest import six from . import fake_api from ..helpers import requires_api_version from .api_test import ( BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, fake_inspect_container ) try: from unittest import mock except ImportError: import mock def fake_inspect_container_tty(self, container): return fake_inspect_container(self, container, tty=True) class StartContainerTest(BaseAPIClientTest): def test_start_container(self): self.client.start(fake_api.FAKE_CONTAINER_ID) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_start_container_none(self): with pytest.raises(ValueError) as excinfo: self.client.start(container=None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) with pytest.raises(ValueError) as excinfo: self.client.start(None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) def test_start_container_regression_573(self): self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) def test_start_container_with_lxc_conf(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) def test_start_container_with_lxc_conf_compat(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) def test_start_container_with_binds_ro(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': { "bind": '/mnt', "ro": True } } ) def test_start_container_with_binds_rw(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': {"bind": '/mnt', "ro": 
False} } ) def test_start_container_with_port_binds(self): self.maxDiff = None with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] }) def test_start_container_with_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} ) def test_start_container_with_multiple_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={ 'path1': 'alias1', 'path2': 'alias2' } ) def test_start_container_with_links_as_list_of_tuples(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, links=[('path', 'alias')]) def test_start_container_privileged(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) def test_start_container_with_dict_instead_of_id(self): self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) class CreateContainerTest(BaseAPIClientTest): def test_create_container(self): self.client.create_container('busybox', 'true') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_binds(self): mount_dest = '/mnt' 
self.client.create_container('busybox', ['ls', mount_dest], volumes=[mount_dest]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volume_string(self): mount_dest = '/mnt' self.client.create_container('busybox', ['ls', mount_dest], volumes=mount_dest) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_ports(self): self.client.create_container('busybox', 'ls', ports=[1111, (2222, 'udp'), (3333,)]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "ExposedPorts": { "1111/tcp": {}, "2222/udp": {}, "3333/tcp": {} }, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_entrypoint(self): self.client.create_container('busybox', 'hello', entrypoint='cowsay entry') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') 
self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["hello"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Entrypoint": ["cowsay", "entry"]}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpu_shares(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpu_shares=5) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "CpuShares": 5}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpu_shares(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_shares=512 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuShares": 512, "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpuset(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpuset='0,1') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, 
"AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Cpuset": "0,1", "CpusetCpus": "0,1"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpuset(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_cpus='0,1' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetCpus": "0,1", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.19') def test_create_container_with_host_config_cpuset_mems(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_mems='0' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetMems": "0", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cgroup_parent(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cgroup_parent='test' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) self.assertIn('HostConfig', data) self.assertIn('CgroupParent', data['HostConfig']) 
self.assertEqual(data['HostConfig']['CgroupParent'], 'test') def test_create_container_with_working_dir(self): self.client.create_container('busybox', 'ls', working_dir='/root') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "WorkingDir": "/root"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_stdin_open(self): self.client.create_container('busybox', 'true', stdin_open=True) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": true, "AttachStderr": true, "AttachStdout": true, "StdinOnce": true, "OpenStdin": true, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volumes_from(self): vol_names = ['foo', 'bar'] try: self.client.create_container('busybox', 'true', volumes_from=vol_names) except docker.errors.DockerException: self.assertTrue( docker.utils.compare_version('1.10', self.client._version) >= 0 ) return args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], ','.join(vol_names)) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_empty_volumes_from(self): with pytest.raises(docker.errors.InvalidVersion): self.client.create_container('busybox', 'true', volumes_from=[]) def test_create_named_container(self): self.client.create_container('busybox', 'true', name='marisa-kirisame') args = fake_request.call_args 
self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) def test_create_container_with_mem_limit_as_int(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit=128.0 ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string_with_k_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128k' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) def test_create_container_with_mem_limit_as_string_with_m_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128m' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) def test_create_container_with_mem_limit_as_string_with_g_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128g' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual( data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 ) def 
test_create_container_with_mem_limit_as_string_with_wrong_value(self): self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='128p' ) self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='1f28' ) def test_create_container_with_lxc_conf(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_lxc_conf_compat(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual( json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_ro(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": True }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 
'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_rw(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": False }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode_and_ro_error(self): with pytest.raises(ValueError): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 
'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", "ro": True, }} ) ) def test_create_container_with_binds_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds=[ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ], ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = [ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_port_binds(self): self.maxDiff = None self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) port_bindings = data['HostConfig']['PortBindings'] self.assertTrue('1111/tcp' in port_bindings) self.assertTrue('2222/tcp' in port_bindings) self.assertTrue('3333/udp' in port_bindings) self.assertTrue('4444/tcp' in port_bindings) self.assertTrue('5555/tcp' in port_bindings) self.assertTrue('6666/tcp' in port_bindings) self.assertEqual( [{"HostPort": "", "HostIp": ""}], port_bindings['1111/tcp'] ) self.assertEqual( [{"HostPort": "2222", "HostIp": ""}], port_bindings['2222/tcp'] ) self.assertEqual( [{"HostPort": "3333", "HostIp": ""}], port_bindings['3333/udp'] ) self.assertEqual( [{"HostPort": "", "HostIp": "127.0.0.1"}], port_bindings['4444/tcp'] ) self.assertEqual( [{"HostPort": "5555", "HostIp": "127.0.0.1"}], port_bindings['5555/tcp'] 
) self.assertEqual(len(port_bindings['6666/tcp']), 2) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_mac_address(self): expected = "02:42:ac:11:00:0a" self.client.create_container( 'busybox', ['sleep', '60'], mac_address=expected ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) assert data['MacAddress'] == expected def test_create_container_with_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={link_path: alias} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_multiple_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={ link_path + '1': alias + '1', link_path + '2': alias + '2' } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = [ 'path1:alias1', 'path2:alias2' ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_links_as_list_of_tuples(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( 
links=[(link_path, alias)] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_privileged(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(privileged=True) ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Privileged'] = True args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_restart_policy(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( restart_policy={ "Name": "always", "MaximumRetryCount": 0 } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['RestartPolicy'] = { "MaximumRetryCount": 0, "Name": "always" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_added_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_add=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], 
url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_dropped_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_drop=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_devices(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( devices=['/dev/sda:/dev/xvda:rwm', '/dev/sdb:/dev/xvdb', '/dev/sdc'] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Devices'] = [ {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvda', 'PathOnHost': '/dev/sda'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvdb', 'PathOnHost': '/dev/sdb'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/sdc', 'PathOnHost': '/dev/sdc'} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def 
test_create_container_with_labels_dict(self): labels_dict = { six.text_type('foo'): six.text_type('1'), six.text_type('bar'): six.text_type('2'), } self.client.create_container( 'busybox', 'true', labels=labels_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_labels_list(self): labels_list = [ six.text_type('foo'), six.text_type('bar'), ] labels_dict = { six.text_type('foo'): six.text_type(), six.text_type('bar'): six.text_type(), } self.client.create_container( 'busybox', 'true', labels=labels_list, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_named_volume(self): mount_dest = '/mnt' volume_name = 'name' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( volume_driver='foodriver', binds={volume_name: { "bind": mount_dest, "ro": False }}), ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['VolumeDriver'] = 'foodriver' expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_stop_signal(self): self.client.create_container('busybox', 
'ls', stop_signal='SIGINT') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "StopSignal": "SIGINT"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.22') def test_create_container_with_aliases(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( network_mode='some-network', ), networking_config=self.client.create_networking_config({ 'some-network': self.client.create_endpoint_config( aliases=['foo', 'bar'], ), }), ) args = fake_request.call_args self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "NetworkMode": "some-network" }, "NetworkingConfig": { "EndpointsConfig": { "some-network": {"Aliases": ["foo", "bar"]} } }}''')) @requires_api_version('1.22') def test_create_container_with_tmpfs_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs=[ "/tmp", "/mnt:size=3G,uid=100" ] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def 
test_create_container_with_tmpfs_dict(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs={ "/tmp": "", "/mnt": "size=3G,uid=100" } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.24') def test_create_container_with_sysctl(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( sysctls={ 'net.core.somaxconn': 1024, 'net.ipv4.tcp_syncookies': '0', } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Sysctls'] = { 'net.core.somaxconn': '1024', 'net.ipv4.tcp_syncookies': '0', } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_unicode_envvars(self): envvars_dict = { 'foo': u'☃', } expected = [ u'foo=☃' ] self.client.create_container( 'busybox', 'true', environment=envvars_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Env'], expected) @requires_api_version('1.25') def test_create_container_with_host_config_cpus(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_count=1, cpu_percent=20, 
nano_cpus=1000 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuCount": 1, "CpuPercent": 20, "NanoCpus": 1000, "NetworkMode": "default" }}''')) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) class ContainerTest(BaseAPIClientTest): def test_list_containers(self): self.client.containers(all=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/json', params={ 'all': 1, 'since': None, 'size': 0, 'limit': -1, 'trunc_cmd': 0, 'before': None }, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_resize_container(self): self.client.resize( {'Id': fake_api.FAKE_CONTAINER_ID}, height=15, width=120 ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/resize', params={'h': 15, 'w': 120}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_rename_container(self): self.client.rename( {'Id': fake_api.FAKE_CONTAINER_ID}, name='foobar' ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/rename', params={'name': 'foobar'}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_wait(self): self.client.wait(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_wait_with_dict_instead_of_id(self): self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_logs(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 
0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_logs_with_dict_instead_of_id(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_log_streaming(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=False) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_following_backwards(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_streaming_and_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', 
fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_tail(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, tail=10) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 10}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since(self): ts = 809222400 with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=ts) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_datetime(self): ts = 809222400 time = datetime.datetime.utcfromtimestamp(ts) with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=time) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_invalid_value_raises_error(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): with self.assertRaises(docker.errors.InvalidArgument): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=42.42) def test_log_tty(self): m = mock.Mock() with 
mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container_tty): with mock.patch('docker.api.client.APIClient._stream_raw_result', m): self.client.logs(fake_api.FAKE_CONTAINER_ID, follow=True, stream=True) self.assertTrue(m.called) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_diff(self): self.client.diff(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_diff_with_dict_instead_of_id(self): self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_port(self): self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_stop_container(self): timeout = 2 self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_stop_container_with_dict_instead_of_id(self): timeout = 2 self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_pause_container(self): self.client.pause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/pause', timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_unpause_container(self): self.client.unpause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/unpause', 
timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_kill_container(self): self.client.kill(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_dict_instead_of_id(self): self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_signal(self): self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={'signal': signal.SIGTERM}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container(self): self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container_with_dict_instead_of_id(self): self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container(self): self.client.remove_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container_with_dict_instead_of_id(self): self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_export(self): self.client.export(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) 
def test_export_with_dict_instead_of_id(self): self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container(self): self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container_undefined_id(self): for arg in None, '', {True: True}: with pytest.raises(docker.errors.NullResource) as excinfo: self.client.inspect_container(arg) self.assertEqual( excinfo.value.args[0], 'Resource ID was not provided' ) def test_container_stats(self): self.client.stats(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/stats', timeout=60, stream=True ) def test_container_top(self): self.client.top(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_container_top_with_psargs(self): self.client.top(fake_api.FAKE_CONTAINER_ID, 'waux') fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={'ps_args': 'waux'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def test_container_update(self): self.client.update_container( fake_api.FAKE_CONTAINER_ID, mem_limit='2k', cpu_shares=124, blkio_weight=345 ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/update' ) self.assertEqual( json.loads(args[1]['data']), {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345} ) self.assertEqual( args[1]['headers']['Content-Type'], 'application/json' )
shakamunyi/docker-py
tests/unit/api_container_test.py
docker/unixconn/unixconn.py
import six import requests.adapters from .. import constants from .npipesocket import NpipeSocket if six.PY3: import http.client as httplib else: import httplib try: import requests.packages.urllib3 as urllib3 except ImportError: import urllib3 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer class NpipeHTTPConnection(httplib.HTTPConnection, object): def __init__(self, npipe_path, timeout=60): super(NpipeHTTPConnection, self).__init__( 'localhost', timeout=timeout ) self.npipe_path = npipe_path self.timeout = timeout def connect(self): sock = NpipeSocket() sock.settimeout(self.timeout) sock.connect(self.npipe_path) self.sock = sock class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, npipe_path, timeout=60, maxsize=10): super(NpipeHTTPConnectionPool, self).__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.npipe_path = npipe_path self.timeout = timeout def _new_conn(self): return NpipeHTTPConnection( self.npipe_path, self.timeout ) # When re-using connections, urllib3 tries to call select() on our # NpipeSocket instance, causing a crash. To circumvent this, we override # _get_conn, where that check happens. def _get_conn(self, timeout): conn = None try: conn = self.pool.get(block=self.block, timeout=timeout) except AttributeError: # self.pool is None raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") except six.moves.queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( self, "Pool reached maximum size and no more " "connections are allowed." 
) pass # Oh well, we'll create a new connection then return conn or self._new_conn() class NpipeAdapter(requests.adapters.HTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path', 'pools', 'timeout'] def __init__(self, base_url, timeout=60, pool_connections=constants.DEFAULT_NUM_POOLS): self.npipe_path = base_url.replace('npipe://', '') self.timeout = timeout self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) super(NpipeAdapter, self).__init__() def get_connection(self, url, proxies=None): with self.pools.lock: pool = self.pools.get(url) if pool: return pool pool = NpipeHTTPConnectionPool( self.npipe_path, self.timeout ) self.pools[url] = pool return pool def request_url(self, request, proxies): # The select_proxy utility in requests errors out when the provided URL # doesn't have a hostname, like is the case when using a UNIX socket. # Since proxies are an irrelevant notion in the case of UNIX sockets # anyway, we simply return the path URL directly. # See also: https://github.com/docker/docker-sdk-python/issues/811 return request.path_url def close(self): self.pools.clear()
# -*- coding: utf-8 -*- import datetime import json import signal import docker import pytest import six from . import fake_api from ..helpers import requires_api_version from .api_test import ( BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, fake_inspect_container ) try: from unittest import mock except ImportError: import mock def fake_inspect_container_tty(self, container): return fake_inspect_container(self, container, tty=True) class StartContainerTest(BaseAPIClientTest): def test_start_container(self): self.client.start(fake_api.FAKE_CONTAINER_ID) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_start_container_none(self): with pytest.raises(ValueError) as excinfo: self.client.start(container=None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) with pytest.raises(ValueError) as excinfo: self.client.start(None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) def test_start_container_regression_573(self): self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) def test_start_container_with_lxc_conf(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) def test_start_container_with_lxc_conf_compat(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) def test_start_container_with_binds_ro(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': { "bind": '/mnt', "ro": True } } ) def test_start_container_with_binds_rw(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': {"bind": '/mnt', "ro": 
False} } ) def test_start_container_with_port_binds(self): self.maxDiff = None with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] }) def test_start_container_with_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} ) def test_start_container_with_multiple_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={ 'path1': 'alias1', 'path2': 'alias2' } ) def test_start_container_with_links_as_list_of_tuples(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, links=[('path', 'alias')]) def test_start_container_privileged(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) def test_start_container_with_dict_instead_of_id(self): self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) class CreateContainerTest(BaseAPIClientTest): def test_create_container(self): self.client.create_container('busybox', 'true') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_binds(self): mount_dest = '/mnt' 
self.client.create_container('busybox', ['ls', mount_dest], volumes=[mount_dest]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volume_string(self): mount_dest = '/mnt' self.client.create_container('busybox', ['ls', mount_dest], volumes=mount_dest) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_ports(self): self.client.create_container('busybox', 'ls', ports=[1111, (2222, 'udp'), (3333,)]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "ExposedPorts": { "1111/tcp": {}, "2222/udp": {}, "3333/tcp": {} }, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_entrypoint(self): self.client.create_container('busybox', 'hello', entrypoint='cowsay entry') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') 
self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["hello"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Entrypoint": ["cowsay", "entry"]}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpu_shares(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpu_shares=5) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "CpuShares": 5}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpu_shares(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_shares=512 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuShares": 512, "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpuset(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpuset='0,1') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, 
"AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Cpuset": "0,1", "CpusetCpus": "0,1"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpuset(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_cpus='0,1' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetCpus": "0,1", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.19') def test_create_container_with_host_config_cpuset_mems(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_mems='0' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetMems": "0", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cgroup_parent(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cgroup_parent='test' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) self.assertIn('HostConfig', data) self.assertIn('CgroupParent', data['HostConfig']) 
self.assertEqual(data['HostConfig']['CgroupParent'], 'test') def test_create_container_with_working_dir(self): self.client.create_container('busybox', 'ls', working_dir='/root') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "WorkingDir": "/root"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_stdin_open(self): self.client.create_container('busybox', 'true', stdin_open=True) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": true, "AttachStderr": true, "AttachStdout": true, "StdinOnce": true, "OpenStdin": true, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volumes_from(self): vol_names = ['foo', 'bar'] try: self.client.create_container('busybox', 'true', volumes_from=vol_names) except docker.errors.DockerException: self.assertTrue( docker.utils.compare_version('1.10', self.client._version) >= 0 ) return args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], ','.join(vol_names)) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_empty_volumes_from(self): with pytest.raises(docker.errors.InvalidVersion): self.client.create_container('busybox', 'true', volumes_from=[]) def test_create_named_container(self): self.client.create_container('busybox', 'true', name='marisa-kirisame') args = fake_request.call_args 
self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) def test_create_container_with_mem_limit_as_int(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit=128.0 ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string_with_k_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128k' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) def test_create_container_with_mem_limit_as_string_with_m_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128m' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) def test_create_container_with_mem_limit_as_string_with_g_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128g' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual( data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 ) def 
test_create_container_with_mem_limit_as_string_with_wrong_value(self): self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='128p' ) self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='1f28' ) def test_create_container_with_lxc_conf(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_lxc_conf_compat(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual( json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_ro(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": True }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 
'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_rw(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": False }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode_and_ro_error(self): with pytest.raises(ValueError): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 
'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", "ro": True, }} ) ) def test_create_container_with_binds_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds=[ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ], ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = [ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_port_binds(self): self.maxDiff = None self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) port_bindings = data['HostConfig']['PortBindings'] self.assertTrue('1111/tcp' in port_bindings) self.assertTrue('2222/tcp' in port_bindings) self.assertTrue('3333/udp' in port_bindings) self.assertTrue('4444/tcp' in port_bindings) self.assertTrue('5555/tcp' in port_bindings) self.assertTrue('6666/tcp' in port_bindings) self.assertEqual( [{"HostPort": "", "HostIp": ""}], port_bindings['1111/tcp'] ) self.assertEqual( [{"HostPort": "2222", "HostIp": ""}], port_bindings['2222/tcp'] ) self.assertEqual( [{"HostPort": "3333", "HostIp": ""}], port_bindings['3333/udp'] ) self.assertEqual( [{"HostPort": "", "HostIp": "127.0.0.1"}], port_bindings['4444/tcp'] ) self.assertEqual( [{"HostPort": "5555", "HostIp": "127.0.0.1"}], port_bindings['5555/tcp'] 
) self.assertEqual(len(port_bindings['6666/tcp']), 2) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_mac_address(self): expected = "02:42:ac:11:00:0a" self.client.create_container( 'busybox', ['sleep', '60'], mac_address=expected ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) assert data['MacAddress'] == expected def test_create_container_with_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={link_path: alias} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_multiple_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={ link_path + '1': alias + '1', link_path + '2': alias + '2' } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = [ 'path1:alias1', 'path2:alias2' ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_links_as_list_of_tuples(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( 
links=[(link_path, alias)] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_privileged(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(privileged=True) ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Privileged'] = True args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_restart_policy(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( restart_policy={ "Name": "always", "MaximumRetryCount": 0 } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['RestartPolicy'] = { "MaximumRetryCount": 0, "Name": "always" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_added_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_add=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], 
url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_dropped_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_drop=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_devices(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( devices=['/dev/sda:/dev/xvda:rwm', '/dev/sdb:/dev/xvdb', '/dev/sdc'] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Devices'] = [ {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvda', 'PathOnHost': '/dev/sda'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvdb', 'PathOnHost': '/dev/sdb'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/sdc', 'PathOnHost': '/dev/sdc'} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def 
test_create_container_with_labels_dict(self): labels_dict = { six.text_type('foo'): six.text_type('1'), six.text_type('bar'): six.text_type('2'), } self.client.create_container( 'busybox', 'true', labels=labels_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_labels_list(self): labels_list = [ six.text_type('foo'), six.text_type('bar'), ] labels_dict = { six.text_type('foo'): six.text_type(), six.text_type('bar'): six.text_type(), } self.client.create_container( 'busybox', 'true', labels=labels_list, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_named_volume(self): mount_dest = '/mnt' volume_name = 'name' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( volume_driver='foodriver', binds={volume_name: { "bind": mount_dest, "ro": False }}), ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['VolumeDriver'] = 'foodriver' expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_stop_signal(self): self.client.create_container('busybox', 
'ls', stop_signal='SIGINT') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "StopSignal": "SIGINT"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.22') def test_create_container_with_aliases(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( network_mode='some-network', ), networking_config=self.client.create_networking_config({ 'some-network': self.client.create_endpoint_config( aliases=['foo', 'bar'], ), }), ) args = fake_request.call_args self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "NetworkMode": "some-network" }, "NetworkingConfig": { "EndpointsConfig": { "some-network": {"Aliases": ["foo", "bar"]} } }}''')) @requires_api_version('1.22') def test_create_container_with_tmpfs_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs=[ "/tmp", "/mnt:size=3G,uid=100" ] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def 
test_create_container_with_tmpfs_dict(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs={ "/tmp": "", "/mnt": "size=3G,uid=100" } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.24') def test_create_container_with_sysctl(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( sysctls={ 'net.core.somaxconn': 1024, 'net.ipv4.tcp_syncookies': '0', } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Sysctls'] = { 'net.core.somaxconn': '1024', 'net.ipv4.tcp_syncookies': '0', } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_unicode_envvars(self): envvars_dict = { 'foo': u'☃', } expected = [ u'foo=☃' ] self.client.create_container( 'busybox', 'true', environment=envvars_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Env'], expected) @requires_api_version('1.25') def test_create_container_with_host_config_cpus(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_count=1, cpu_percent=20, 
nano_cpus=1000 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuCount": 1, "CpuPercent": 20, "NanoCpus": 1000, "NetworkMode": "default" }}''')) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) class ContainerTest(BaseAPIClientTest): def test_list_containers(self): self.client.containers(all=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/json', params={ 'all': 1, 'since': None, 'size': 0, 'limit': -1, 'trunc_cmd': 0, 'before': None }, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_resize_container(self): self.client.resize( {'Id': fake_api.FAKE_CONTAINER_ID}, height=15, width=120 ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/resize', params={'h': 15, 'w': 120}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_rename_container(self): self.client.rename( {'Id': fake_api.FAKE_CONTAINER_ID}, name='foobar' ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/rename', params={'name': 'foobar'}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_wait(self): self.client.wait(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_wait_with_dict_instead_of_id(self): self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_logs(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 
0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_logs_with_dict_instead_of_id(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_log_streaming(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=False) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_following_backwards(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_streaming_and_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', 
fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_tail(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, tail=10) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 10}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since(self): ts = 809222400 with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=ts) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_datetime(self): ts = 809222400 time = datetime.datetime.utcfromtimestamp(ts) with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=time) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_invalid_value_raises_error(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): with self.assertRaises(docker.errors.InvalidArgument): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=42.42) def test_log_tty(self): m = mock.Mock() with 
mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container_tty): with mock.patch('docker.api.client.APIClient._stream_raw_result', m): self.client.logs(fake_api.FAKE_CONTAINER_ID, follow=True, stream=True) self.assertTrue(m.called) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_diff(self): self.client.diff(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_diff_with_dict_instead_of_id(self): self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_port(self): self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_stop_container(self): timeout = 2 self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_stop_container_with_dict_instead_of_id(self): timeout = 2 self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_pause_container(self): self.client.pause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/pause', timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_unpause_container(self): self.client.unpause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/unpause', 
timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_kill_container(self): self.client.kill(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_dict_instead_of_id(self): self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_signal(self): self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={'signal': signal.SIGTERM}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container(self): self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container_with_dict_instead_of_id(self): self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container(self): self.client.remove_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container_with_dict_instead_of_id(self): self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_export(self): self.client.export(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) 
def test_export_with_dict_instead_of_id(self): self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container(self): self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container_undefined_id(self): for arg in None, '', {True: True}: with pytest.raises(docker.errors.NullResource) as excinfo: self.client.inspect_container(arg) self.assertEqual( excinfo.value.args[0], 'Resource ID was not provided' ) def test_container_stats(self): self.client.stats(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/stats', timeout=60, stream=True ) def test_container_top(self): self.client.top(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_container_top_with_psargs(self): self.client.top(fake_api.FAKE_CONTAINER_ID, 'waux') fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={'ps_args': 'waux'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def test_container_update(self): self.client.update_container( fake_api.FAKE_CONTAINER_ID, mem_limit='2k', cpu_shares=124, blkio_weight=345 ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/update' ) self.assertEqual( json.loads(args[1]['data']), {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345} ) self.assertEqual( args[1]['headers']['Content-Type'], 'application/json' )
shakamunyi/docker-py
tests/unit/api_container_test.py
docker/transport/npipeconn.py
# flake8: noqa from .unixconn import UnixAdapter from .ssladapter import SSLAdapter try: from .npipeconn import NpipeAdapter from .npipesocket import NpipeSocket except ImportError: pass
# -*- coding: utf-8 -*- import datetime import json import signal import docker import pytest import six from . import fake_api from ..helpers import requires_api_version from .api_test import ( BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, fake_inspect_container ) try: from unittest import mock except ImportError: import mock def fake_inspect_container_tty(self, container): return fake_inspect_container(self, container, tty=True) class StartContainerTest(BaseAPIClientTest): def test_start_container(self): self.client.start(fake_api.FAKE_CONTAINER_ID) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_start_container_none(self): with pytest.raises(ValueError) as excinfo: self.client.start(container=None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) with pytest.raises(ValueError) as excinfo: self.client.start(None) self.assertEqual( str(excinfo.value), 'Resource ID was not provided', ) def test_start_container_regression_573(self): self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) def test_start_container_with_lxc_conf(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) def test_start_container_with_lxc_conf_compat(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) def test_start_container_with_binds_ro(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': { "bind": '/mnt', "ro": True } } ) def test_start_container_with_binds_rw(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, binds={ '/tmp': {"bind": '/mnt', "ro": 
False} } ) def test_start_container_with_port_binds(self): self.maxDiff = None with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] }) def test_start_container_with_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} ) def test_start_container_with_multiple_links(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start( fake_api.FAKE_CONTAINER_ID, links={ 'path1': 'alias1', 'path2': 'alias2' } ) def test_start_container_with_links_as_list_of_tuples(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, links=[('path', 'alias')]) def test_start_container_privileged(self): with pytest.raises(docker.errors.DeprecatedMethod): self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) def test_start_container_with_dict_instead_of_id(self): self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/start' ) assert 'data' not in args[1] self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) class CreateContainerTest(BaseAPIClientTest): def test_create_container(self): self.client.create_container('busybox', 'true') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_binds(self): mount_dest = '/mnt' 
self.client.create_container('busybox', ['ls', mount_dest], volumes=[mount_dest]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volume_string(self): mount_dest = '/mnt' self.client.create_container('busybox', ['ls', mount_dest], volumes=mount_dest) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls", "/mnt"], "AttachStdin": false, "Volumes": {"/mnt": {}}, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_ports(self): self.client.create_container('busybox', 'ls', ports=[1111, (2222, 'udp'), (3333,)]) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "ExposedPorts": { "1111/tcp": {}, "2222/udp": {}, "3333/tcp": {} }, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_entrypoint(self): self.client.create_container('busybox', 'hello', entrypoint='cowsay entry') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') 
self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["hello"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Entrypoint": ["cowsay", "entry"]}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpu_shares(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpu_shares=5) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "CpuShares": 5}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpu_shares(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_shares=512 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuShares": 512, "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cpuset(self): with pytest.deprecated_call(): self.client.create_container('busybox', 'ls', cpuset='0,1') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, 
"AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "Cpuset": "0,1", "CpusetCpus": "0,1"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.18') def test_create_container_with_host_config_cpuset(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_cpus='0,1' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetCpus": "0,1", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.19') def test_create_container_with_host_config_cpuset_mems(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpuset_mems='0' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpusetMems": "0", "NetworkMode": "default" }}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_cgroup_parent(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cgroup_parent='test' ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) self.assertIn('HostConfig', data) self.assertIn('CgroupParent', data['HostConfig']) 
self.assertEqual(data['HostConfig']['CgroupParent'], 'test') def test_create_container_with_working_dir(self): self.client.create_container('busybox', 'ls', working_dir='/root') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "WorkingDir": "/root"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_stdin_open(self): self.client.create_container('busybox', 'true', stdin_open=True) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": true, "AttachStderr": true, "AttachStdout": true, "StdinOnce": true, "OpenStdin": true, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_with_volumes_from(self): vol_names = ['foo', 'bar'] try: self.client.create_container('busybox', 'true', volumes_from=vol_names) except docker.errors.DockerException: self.assertTrue( docker.utils.compare_version('1.10', self.client._version) >= 0 ) return args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], ','.join(vol_names)) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) def test_create_container_empty_volumes_from(self): with pytest.raises(docker.errors.InvalidVersion): self.client.create_container('busybox', 'true', volumes_from=[]) def test_create_named_container(self): self.client.create_container('busybox', 'true', name='marisa-kirisame') args = fake_request.call_args 
self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["true"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "StdinOnce": false, "OpenStdin": false, "NetworkDisabled": false}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) def test_create_container_with_mem_limit_as_int(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit=128.0 ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0) def test_create_container_with_mem_limit_as_string_with_k_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128k' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) def test_create_container_with_mem_limit_as_string_with_m_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128m' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) def test_create_container_with_mem_limit_as_string_with_g_unit(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( mem_limit='128g' ) ) args = fake_request.call_args data = json.loads(args[1]['data']) self.assertEqual( data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 ) def 
test_create_container_with_mem_limit_as_string_with_wrong_value(self): self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='128p' ) self.assertRaises( docker.errors.DockerException, self.client.create_host_config, mem_limit='1f28' ) def test_create_container_with_lxc_conf(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf={'lxc.conf.k': 'lxc.conf.value'} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_lxc_conf_compat(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['LxcConf'] = [ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} ] self.assertEqual( json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_ro(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": True }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 
'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_rw(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "ro": False }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode(self): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", }} ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_binds_mode_and_ro_error(self): with pytest.raises(ValueError): mount_dest = '/mnt' mount_origin = '/tmp' self.client.create_container( 'busybox', 
'true', host_config=self.client.create_host_config( binds={mount_origin: { "bind": mount_dest, "mode": "z", "ro": True, }} ) ) def test_create_container_with_binds_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( binds=[ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ], ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Binds'] = [ "/tmp:/mnt/1:ro", "/tmp:/mnt/2", ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_port_binds(self): self.maxDiff = None self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( port_bindings={ 1111: None, 2222: 2222, '3333/udp': (3333,), 4444: ('127.0.0.1',), 5555: ('127.0.0.1', 5555), 6666: [('127.0.0.1',), ('192.168.0.1',)] } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) port_bindings = data['HostConfig']['PortBindings'] self.assertTrue('1111/tcp' in port_bindings) self.assertTrue('2222/tcp' in port_bindings) self.assertTrue('3333/udp' in port_bindings) self.assertTrue('4444/tcp' in port_bindings) self.assertTrue('5555/tcp' in port_bindings) self.assertTrue('6666/tcp' in port_bindings) self.assertEqual( [{"HostPort": "", "HostIp": ""}], port_bindings['1111/tcp'] ) self.assertEqual( [{"HostPort": "2222", "HostIp": ""}], port_bindings['2222/tcp'] ) self.assertEqual( [{"HostPort": "3333", "HostIp": ""}], port_bindings['3333/udp'] ) self.assertEqual( [{"HostPort": "", "HostIp": "127.0.0.1"}], port_bindings['4444/tcp'] ) self.assertEqual( [{"HostPort": "5555", "HostIp": "127.0.0.1"}], port_bindings['5555/tcp'] 
) self.assertEqual(len(port_bindings['6666/tcp']), 2) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_mac_address(self): expected = "02:42:ac:11:00:0a" self.client.create_container( 'busybox', ['sleep', '60'], mac_address=expected ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') data = json.loads(args[1]['data']) assert data['MacAddress'] == expected def test_create_container_with_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={link_path: alias} ) ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_multiple_links(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( links={ link_path + '1': alias + '1', link_path + '2': alias + '2' } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = [ 'path1:alias1', 'path2:alias2' ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_with_links_as_list_of_tuples(self): link_path = 'path' alias = 'alias' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( 
links=[(link_path, alias)] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Links'] = ['path:alias'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) def test_create_container_privileged(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(privileged=True) ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Privileged'] = True args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_restart_policy(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( restart_policy={ "Name": "always", "MaximumRetryCount": 0 } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['RestartPolicy'] = { "MaximumRetryCount": 0, "Name": "always" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_added_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_add=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], 
url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_dropped_capabilities(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config(cap_drop=['MKNOD']) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_devices(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( devices=['/dev/sda:/dev/xvda:rwm', '/dev/sdb:/dev/xvdb', '/dev/sdc'] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Devices'] = [ {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvda', 'PathOnHost': '/dev/sda'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/xvdb', 'PathOnHost': '/dev/sdb'}, {'CgroupPermissions': 'rwm', 'PathInContainer': '/dev/sdc', 'PathOnHost': '/dev/sdc'} ] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def 
test_create_container_with_labels_dict(self): labels_dict = { six.text_type('foo'): six.text_type('1'), six.text_type('bar'): six.text_type('2'), } self.client.create_container( 'busybox', 'true', labels=labels_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_labels_list(self): labels_list = [ six.text_type('foo'), six.text_type('bar'), ] labels_dict = { six.text_type('foo'): six.text_type(), six.text_type('bar'): six.text_type(), } self.client.create_container( 'busybox', 'true', labels=labels_list, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_named_volume(self): mount_dest = '/mnt' volume_name = 'name' self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( volume_driver='foodriver', binds={volume_name: { "bind": mount_dest, "ro": False }}), ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/create' ) expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['VolumeDriver'] = 'foodriver' expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_stop_signal(self): self.client.create_container('busybox', 
'ls', stop_signal='SIGINT') args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "StopSignal": "SIGINT"}''')) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) @requires_api_version('1.22') def test_create_container_with_aliases(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( network_mode='some-network', ), networking_config=self.client.create_networking_config({ 'some-network': self.client.create_endpoint_config( aliases=['foo', 'bar'], ), }), ) args = fake_request.call_args self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "NetworkMode": "some-network" }, "NetworkingConfig": { "EndpointsConfig": { "some-network": {"Aliases": ["foo", "bar"]} } }}''')) @requires_api_version('1.22') def test_create_container_with_tmpfs_list(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs=[ "/tmp", "/mnt:size=3G,uid=100" ] ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def 
test_create_container_with_tmpfs_dict(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( tmpfs={ "/tmp": "", "/mnt": "size=3G,uid=100" } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Tmpfs'] = { "/tmp": "", "/mnt": "size=3G,uid=100" } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual(args[1]['headers'], {'Content-Type': 'application/json'}) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.24') def test_create_container_with_sysctl(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( sysctls={ 'net.core.somaxconn': 1024, 'net.ipv4.tcp_syncookies': '0', } ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') expected_payload = self.base_create_payload() expected_payload['HostConfig'] = self.client.create_host_config() expected_payload['HostConfig']['Sysctls'] = { 'net.core.somaxconn': '1024', 'net.ipv4.tcp_syncookies': '0', } self.assertEqual(json.loads(args[1]['data']), expected_payload) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) self.assertEqual( args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS ) def test_create_container_with_unicode_envvars(self): envvars_dict = { 'foo': u'☃', } expected = [ u'foo=☃' ] self.client.create_container( 'busybox', 'true', environment=envvars_dict, ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data'])['Env'], expected) @requires_api_version('1.25') def test_create_container_with_host_config_cpus(self): self.client.create_container( 'busybox', 'ls', host_config=self.client.create_host_config( cpu_count=1, cpu_percent=20, 
nano_cpus=1000 ) ) args = fake_request.call_args self.assertEqual(args[0][1], url_prefix + 'containers/create') self.assertEqual(json.loads(args[1]['data']), json.loads(''' {"Tty": false, "Image": "busybox", "Cmd": ["ls"], "AttachStdin": false, "AttachStderr": true, "AttachStdout": true, "OpenStdin": false, "StdinOnce": false, "NetworkDisabled": false, "HostConfig": { "CpuCount": 1, "CpuPercent": 20, "NanoCpus": 1000, "NetworkMode": "default" }}''')) self.assertEqual( args[1]['headers'], {'Content-Type': 'application/json'} ) class ContainerTest(BaseAPIClientTest): def test_list_containers(self): self.client.containers(all=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/json', params={ 'all': 1, 'since': None, 'size': 0, 'limit': -1, 'trunc_cmd': 0, 'before': None }, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_resize_container(self): self.client.resize( {'Id': fake_api.FAKE_CONTAINER_ID}, height=15, width=120 ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/resize', params={'h': 15, 'w': 120}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_rename_container(self): self.client.rename( {'Id': fake_api.FAKE_CONTAINER_ID}, name='foobar' ) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/rename', params={'name': 'foobar'}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_wait(self): self.client.wait(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_wait_with_dict_instead_of_id(self): self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/wait', timeout=None ) def test_logs(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 
0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_logs_with_dict_instead_of_id(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) self.assertEqual( logs, 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') ) def test_log_streaming(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=False) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_following_backwards(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_streaming_and_following(self): with mock.patch('docker.api.client.APIClient.inspect_container', 
fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, follow=True) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_log_tail(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, tail=10) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 10}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since(self): ts = 809222400 with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=ts) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_datetime(self): ts = 809222400 time = datetime.datetime.utcfromtimestamp(ts) with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=time) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=False ) def test_log_since_with_invalid_value_raises_error(self): with mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container): with self.assertRaises(docker.errors.InvalidArgument): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, follow=False, since=42.42) def test_log_tty(self): m = mock.Mock() with 
mock.patch('docker.api.client.APIClient.inspect_container', fake_inspect_container_tty): with mock.patch('docker.api.client.APIClient._stream_raw_result', m): self.client.logs(fake_api.FAKE_CONTAINER_ID, follow=True, stream=True) self.assertTrue(m.called) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, stream=True ) def test_diff(self): self.client.diff(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_diff_with_dict_instead_of_id(self): self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/changes', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_port(self): self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_stop_container(self): timeout = 2 self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_stop_container_with_dict_instead_of_id(self): timeout = 2 self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=timeout) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_pause_container(self): self.client.pause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/pause', timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_unpause_container(self): self.client.unpause(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/unpause', 
timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_kill_container(self): self.client.kill(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_dict_instead_of_id(self): self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_kill_container_with_signal(self): self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/kill', params={'signal': signal.SIGTERM}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container(self): self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_restart_container_with_dict_instead_of_id(self): self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) fake_request.assert_called_with( 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container(self): self.client.remove_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_remove_container_with_dict_instead_of_id(self): self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'DELETE', url_prefix + 'containers/3cc2351ab11b', params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_export(self): self.client.export(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) 
def test_export_with_dict_instead_of_id(self): self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/export', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container(self): self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/json', timeout=DEFAULT_TIMEOUT_SECONDS ) def test_inspect_container_undefined_id(self): for arg in None, '', {True: True}: with pytest.raises(docker.errors.NullResource) as excinfo: self.client.inspect_container(arg) self.assertEqual( excinfo.value.args[0], 'Resource ID was not provided' ) def test_container_stats(self): self.client.stats(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/stats', timeout=60, stream=True ) def test_container_top(self): self.client.top(fake_api.FAKE_CONTAINER_ID) fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) def test_container_top_with_psargs(self): self.client.top(fake_api.FAKE_CONTAINER_ID, 'waux') fake_request.assert_called_with( 'GET', url_prefix + 'containers/3cc2351ab11b/top', params={'ps_args': 'waux'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @requires_api_version('1.22') def test_container_update(self): self.client.update_container( fake_api.FAKE_CONTAINER_ID, mem_limit='2k', cpu_shares=124, blkio_weight=345 ) args = fake_request.call_args self.assertEqual( args[0][1], url_prefix + 'containers/3cc2351ab11b/update' ) self.assertEqual( json.loads(args[1]['data']), {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345} ) self.assertEqual( args[1]['headers']['Content-Type'], 'application/json' )
shakamunyi/docker-py
tests/unit/api_container_test.py
docker/transport/__init__.py
"""Support for monitoring juicenet/juicepoint/juicebox based EVSE switches.""" from homeassistant.components.switch import SwitchEntity from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR from .entity import JuiceNetDevice async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the JuiceNet switches.""" entities = [] juicenet_data = hass.data[DOMAIN][config_entry.entry_id] api = juicenet_data[JUICENET_API] coordinator = juicenet_data[JUICENET_COORDINATOR] for device in api.devices: entities.append(JuiceNetChargeNowSwitch(device, coordinator)) async_add_entities(entities) class JuiceNetChargeNowSwitch(JuiceNetDevice, SwitchEntity): """Implementation of a JuiceNet switch.""" def __init__(self, device, coordinator): """Initialise the switch.""" super().__init__(device, "charge_now", coordinator) @property def name(self): """Return the name of the device.""" return f"{self.device.name} Charge Now" @property def is_on(self): """Return true if switch is on.""" return self.device.override_time != 0 async def async_turn_on(self, **kwargs): """Charge now.""" await self.device.set_override(True) async def async_turn_off(self, **kwargs): """Don't charge now.""" await self.device.set_override(False)
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission 
is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/juicenet/switch.py
"""Support for Melissa Climate A/C.""" import logging from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( FAN_AUTO, FAN_HIGH, FAN_LOW, FAN_MEDIUM, HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_OFF, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS from . import DATA_MELISSA _LOGGER = logging.getLogger(__name__) SUPPORT_FLAGS = SUPPORT_FAN_MODE | SUPPORT_TARGET_TEMPERATURE OP_MODES = [ HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_OFF, ] FAN_MODES = [FAN_AUTO, FAN_HIGH, FAN_MEDIUM, FAN_LOW] async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Iterate through and add all Melissa devices.""" api = hass.data[DATA_MELISSA] devices = (await api.async_fetch_devices()).values() all_devices = [] for device in devices: if device["type"] == "melissa": all_devices.append(MelissaClimate(api, device["serial_number"], device)) async_add_entities(all_devices) class MelissaClimate(ClimateEntity): """Representation of a Melissa Climate device.""" def __init__(self, api, serial_number, init_data): """Initialize the climate device.""" self._name = init_data["name"] self._api = api self._serial_number = serial_number self._data = init_data["controller_log"] self._state = None self._cur_settings = None @property def name(self): """Return the name of the thermostat, if any.""" return self._name @property def fan_mode(self): """Return the current fan mode.""" if self._cur_settings is not None: return self.melissa_fan_to_hass(self._cur_settings[self._api.FAN]) @property def current_temperature(self): """Return the current temperature.""" if self._data: return self._data[self._api.TEMP] @property def current_humidity(self): """Return the current humidity value.""" if self._data: return self._data[self._api.HUMIDITY] @property def 
target_temperature_step(self): """Return the supported step of target temperature.""" return PRECISION_WHOLE @property def hvac_mode(self): """Return the current operation mode.""" if self._cur_settings is None: return None is_on = self._cur_settings[self._api.STATE] in ( self._api.STATE_ON, self._api.STATE_IDLE, ) if not is_on: return HVAC_MODE_OFF return self.melissa_op_to_hass(self._cur_settings[self._api.MODE]) @property def hvac_modes(self): """Return the list of available operation modes.""" return OP_MODES @property def fan_modes(self): """List of available fan modes.""" return FAN_MODES @property def target_temperature(self): """Return the temperature we try to reach.""" if self._cur_settings is None: return None return self._cur_settings[self._api.TEMP] @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def min_temp(self): """Return the minimum supported temperature for the thermostat.""" return 16 @property def max_temp(self): """Return the maximum supported temperature for the thermostat.""" return 30 @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temp = kwargs.get(ATTR_TEMPERATURE) await self.async_send({self._api.TEMP: temp}) async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" melissa_fan_mode = self.hass_fan_to_melissa(fan_mode) await self.async_send({self._api.FAN: melissa_fan_mode}) async def async_set_hvac_mode(self, hvac_mode): """Set operation mode.""" if hvac_mode == HVAC_MODE_OFF: await self.async_send({self._api.STATE: self._api.STATE_OFF}) return mode = self.hass_mode_to_melissa(hvac_mode) await self.async_send( {self._api.MODE: mode, self._api.STATE: self._api.STATE_ON} ) async def async_send(self, value): """Send action to service.""" try: old_value = self._cur_settings.copy() 
self._cur_settings.update(value) except AttributeError: old_value = None if not await self._api.async_send( self._serial_number, "melissa", self._cur_settings ): self._cur_settings = old_value async def async_update(self): """Get latest data from Melissa.""" try: self._data = (await self._api.async_status(cached=True))[ self._serial_number ] self._cur_settings = ( await self._api.async_cur_settings(self._serial_number) )["controller"]["_relation"]["command_log"] except KeyError: _LOGGER.warning("Unable to update entity %s", self.entity_id) def melissa_op_to_hass(self, mode): """Translate Melissa modes to hass states.""" if mode == self._api.MODE_HEAT: return HVAC_MODE_HEAT if mode == self._api.MODE_COOL: return HVAC_MODE_COOL if mode == self._api.MODE_DRY: return HVAC_MODE_DRY if mode == self._api.MODE_FAN: return HVAC_MODE_FAN_ONLY _LOGGER.warning("Operation mode %s could not be mapped to hass", mode) return None def melissa_fan_to_hass(self, fan): """Translate Melissa fan modes to hass modes.""" if fan == self._api.FAN_AUTO: return HVAC_MODE_AUTO if fan == self._api.FAN_LOW: return FAN_LOW if fan == self._api.FAN_MEDIUM: return FAN_MEDIUM if fan == self._api.FAN_HIGH: return FAN_HIGH _LOGGER.warning("Fan mode %s could not be mapped to hass", fan) return None def hass_mode_to_melissa(self, mode): """Translate hass states to melissa modes.""" if mode == HVAC_MODE_HEAT: return self._api.MODE_HEAT if mode == HVAC_MODE_COOL: return self._api.MODE_COOL if mode == HVAC_MODE_DRY: return self._api.MODE_DRY if mode == HVAC_MODE_FAN_ONLY: return self._api.MODE_FAN _LOGGER.warning("Melissa have no setting for %s mode", mode) def hass_fan_to_melissa(self, fan): """Translate hass fan modes to melissa modes.""" if fan == HVAC_MODE_AUTO: return self._api.FAN_AUTO if fan == FAN_LOW: return self._api.FAN_LOW if fan == FAN_MEDIUM: return self._api.FAN_MEDIUM if fan == FAN_HIGH: return self._api.FAN_HIGH _LOGGER.warning("Melissa have no setting for %s fan mode", fan)
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission 
is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/melissa/climate.py
"""Support for VELUX KLF 200 devices.""" import logging from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv DOMAIN = "velux" DATA_VELUX = "data_velux" SUPPORTED_DOMAINS = ["cover", "scene"] _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the velux component.""" try: hass.data[DATA_VELUX] = VeluxModule(hass, config[DOMAIN]) hass.data[DATA_VELUX].setup() await hass.data[DATA_VELUX].async_start() except PyVLXException as ex: _LOGGER.exception("Can't connect to velux interface: %s", ex) return False for component in SUPPORTED_DOMAINS: hass.async_create_task( discovery.async_load_platform(hass, component, DOMAIN, {}, config) ) return True class VeluxModule: """Abstraction for velux component.""" def __init__(self, hass, domain_config): """Initialize for velux component.""" self.pyvlx = None self._hass = hass self._domain_config = domain_config def setup(self): """Velux component setup.""" async def on_hass_stop(event): """Close connection when hass stops.""" _LOGGER.debug("Velux interface terminated") await self.pyvlx.disconnect() async def async_reboot_gateway(service_call): await self.pyvlx.reboot_gateway() self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) host = self._domain_config.get(CONF_HOST) password = self._domain_config.get(CONF_PASSWORD) self.pyvlx = PyVLX(host=host, password=password) self._hass.services.async_register( DOMAIN, "reboot_gateway", async_reboot_gateway ) async def async_start(self): """Start velux component.""" _LOGGER.debug("Velux interface started") await self.pyvlx.load_scenes() await self.pyvlx.load_nodes()
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission 
is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/velux/__init__.py
"""Insteon base entity.""" import functools import logging from pyinsteon import devices from homeassistant.core import callback from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from .const import ( DOMAIN, SIGNAL_ADD_DEFAULT_LINKS, SIGNAL_LOAD_ALDB, SIGNAL_PRINT_ALDB, SIGNAL_REMOVE_ENTITY, SIGNAL_SAVE_DEVICES, STATE_NAME_LABEL_MAP, ) from .utils import print_aldb_to_log _LOGGER = logging.getLogger(__name__) class InsteonEntity(Entity): """INSTEON abstract base entity.""" def __init__(self, device, group): """Initialize the INSTEON binary sensor.""" self._insteon_device_group = device.groups[group] self._insteon_device = device def __hash__(self): """Return the hash of the Insteon Entity.""" return hash(self._insteon_device) @property def should_poll(self): """No polling needed.""" return False @property def address(self): """Return the address of the node.""" return str(self._insteon_device.address) @property def group(self): """Return the INSTEON group that the entity responds to.""" return self._insteon_device_group.group @property def unique_id(self) -> str: """Return a unique ID.""" if self._insteon_device_group.group == 0x01: uid = self._insteon_device.id else: uid = f"{self._insteon_device.id}_{self._insteon_device_group.group}" return uid @property def name(self): """Return the name of the node (used for Entity_ID).""" # Set a base description description = self._insteon_device.description if description is None: description = "Unknown Device" # Get an extension label if there is one extension = self._get_label() if extension: extension = f" {extension}" return f"{description} {self._insteon_device.address}{extension}" @property def device_state_attributes(self): """Provide attributes for display on device card.""" return {"insteon_address": self.address, "insteon_group": self.group} @property def device_info(self): """Return device information.""" return { 
"identifiers": {(DOMAIN, str(self._insteon_device.address))}, "name": f"{self._insteon_device.description} {self._insteon_device.address}", "model": f"{self._insteon_device.model} ({self._insteon_device.cat!r}, 0x{self._insteon_device.subcat:02x})", "sw_version": f"{self._insteon_device.firmware:02x} Engine Version: {self._insteon_device.engine_version}", "manufacturer": "Smart Home", "via_device": (DOMAIN, str(devices.modem.address)), } @callback def async_entity_update(self, name, address, value, group): """Receive notification from transport that new data exists.""" _LOGGER.debug( "Received update for device %s group %d value %s", address, group, value, ) self.async_write_ha_state() async def async_added_to_hass(self): """Register INSTEON update events.""" _LOGGER.debug( "Tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.subscribe(self.async_entity_update) load_signal = f"{self.entity_id}_{SIGNAL_LOAD_ALDB}" self.async_on_remove( async_dispatcher_connect(self.hass, load_signal, self._async_read_aldb) ) print_signal = f"{self.entity_id}_{SIGNAL_PRINT_ALDB}" async_dispatcher_connect(self.hass, print_signal, self._print_aldb) default_links_signal = f"{self.entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}" async_dispatcher_connect( self.hass, default_links_signal, self._async_add_default_links ) remove_signal = f"{self._insteon_device.address.id}_{SIGNAL_REMOVE_ENTITY}" self.async_on_remove( async_dispatcher_connect( self.hass, remove_signal, functools.partial(self.async_remove, force_remove=True), ) ) async def async_will_remove_from_hass(self): """Unsubscribe to INSTEON update events.""" _LOGGER.debug( "Remove tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.unsubscribe(self.async_entity_update) async def _async_read_aldb(self, reload): """Call device load process and print to log.""" await 
self._insteon_device.aldb.async_load(refresh=reload) self._print_aldb() async_dispatcher_send(self.hass, SIGNAL_SAVE_DEVICES) def _print_aldb(self): """Print the device ALDB to the log file.""" print_aldb_to_log(self._insteon_device.aldb) def _get_label(self): """Get the device label for grouped devices.""" label = "" if len(self._insteon_device.groups) > 1: if self._insteon_device_group.name in STATE_NAME_LABEL_MAP: label = STATE_NAME_LABEL_MAP[self._insteon_device_group.name] else: label = f"Group {self.group:d}" return label async def _async_add_default_links(self): """Add default links between the device and the modem.""" await self._insteon_device.async_add_default_links()
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission 
is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/insteon/insteon_entity.py
"""Config flow to configure the Toon component.""" import logging from typing import Any, Dict, List, Optional from toonapi import Agreement, Toon, ToonError import voluptuous as vol from homeassistant import config_entries from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler from .const import CONF_AGREEMENT, CONF_AGREEMENT_ID, CONF_MIGRATE, DOMAIN class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle a Toon config flow.""" CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH DOMAIN = DOMAIN VERSION = 2 agreements: Optional[List[Agreement]] = None data: Optional[Dict[str, Any]] = None @property def logger(self) -> logging.Logger: """Return logger.""" return logging.getLogger(__name__) async def async_oauth_create_entry(self, data: Dict[str, Any]) -> Dict[str, Any]: """Test connection and load up agreements.""" self.data = data toon = Toon( token=self.data["token"]["access_token"], session=async_get_clientsession(self.hass), ) try: self.agreements = await toon.agreements() except ToonError: return self.async_abort(reason="connection_error") if not self.agreements: return self.async_abort(reason="no_agreements") return await self.async_step_agreement() async def async_step_import( self, config: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: """Start a configuration flow based on imported data. This step is merely here to trigger "discovery" when the `toon` integration is listed in the user configuration, or when migrating from the version 1 schema. 
""" if config is not None and CONF_MIGRATE in config: self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) else: await self._async_handle_discovery_without_unique_id() return await self.async_step_user() async def async_step_agreement( self, user_input: Dict[str, Any] = None ) -> Dict[str, Any]: """Select Toon agreement to add.""" if len(self.agreements) == 1: return await self._create_entry(self.agreements[0]) agreements_list = [ f"{agreement.street} {agreement.house_number}, {agreement.city}" for agreement in self.agreements ] if user_input is None: return self.async_show_form( step_id="agreement", data_schema=vol.Schema( {vol.Required(CONF_AGREEMENT): vol.In(agreements_list)} ), ) agreement_index = agreements_list.index(user_input[CONF_AGREEMENT]) return await self._create_entry(self.agreements[agreement_index]) async def _create_entry(self, agreement: Agreement) -> Dict[str, Any]: if CONF_MIGRATE in self.context: await self.hass.config_entries.async_remove(self.context[CONF_MIGRATE]) await self.async_set_unique_id(agreement.agreement_id) self._abort_if_unique_id_configured() self.data[CONF_AGREEMENT_ID] = agreement.agreement_id return self.async_create_entry( title=f"{agreement.street} {agreement.house_number}, {agreement.city}", data=self.data, )
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission 
is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/toon/config_flow.py