Dataset schema (one row per code file / test file pair):

  input      string, length 53 to 297k    -- full contents of the source code file
  output     string, 604 distinct values  -- full contents of the matching test file
  repo_name  string, 376 distinct values  -- repository the pair was taken from
  test_path  string, 583 distinct values  -- path of the test file inside the repository
  code_path  string, length 7 to 116      -- path of the code file inside the repository
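A minimal sketch of how a dataset with this schema could be loaded and inspected with the Hugging Face datasets library; the dataset identifier used below is a hypothetical placeholder, since the actual Hub name is not given here.

# Minimal sketch, assuming the dataset is published on the Hugging Face Hub.
# "user/code-to-test" is a hypothetical placeholder name, not the real identifier.
from datasets import load_dataset

ds = load_dataset("user/code-to-test", split="train")

row = ds[0]
print(row["repo_name"])    # e.g. "mezz64/home-assistant"
print(row["code_path"])    # e.g. "homeassistant/components/sonarr/sensor.py"
print(row["test_path"])    # e.g. "tests/components/geo_rss_events/test_sensor.py"
print(len(row["input"]), len(row["output"]))  # character counts of code and test files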
"""Support for Sonarr sensors.""" from datetime import timedelta import logging from typing import Any, Callable, Dict, List, Optional from sonarr import Sonarr, SonarrConnectionError, SonarrError from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_GIGABYTES from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from . import SonarrEntity from .const import CONF_UPCOMING_DAYS, CONF_WANTED_MAX_ITEMS, DATA_SONARR, DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up Sonarr sensors based on a config entry.""" options = entry.options sonarr = hass.data[DOMAIN][entry.entry_id][DATA_SONARR] entities = [ SonarrCommandsSensor(sonarr, entry.entry_id), SonarrDiskspaceSensor(sonarr, entry.entry_id), SonarrQueueSensor(sonarr, entry.entry_id), SonarrSeriesSensor(sonarr, entry.entry_id), SonarrUpcomingSensor(sonarr, entry.entry_id, days=options[CONF_UPCOMING_DAYS]), SonarrWantedSensor( sonarr, entry.entry_id, max_items=options[CONF_WANTED_MAX_ITEMS] ), ] async_add_entities(entities, True) def sonarr_exception_handler(func): """Decorate Sonarr calls to handle Sonarr exceptions. A decorator that wraps the passed in function, catches Sonarr errors, and handles the availability of the entity. """ async def handler(self, *args, **kwargs): try: await func(self, *args, **kwargs) self.last_update_success = True except SonarrConnectionError as error: if self.available: _LOGGER.error("Error communicating with API: %s", error) self.last_update_success = False except SonarrError as error: if self.available: _LOGGER.error("Invalid response from API: %s", error) self.last_update_success = False return handler class SonarrSensor(SonarrEntity): """Implementation of the Sonarr sensor.""" def __init__( self, *, sonarr: Sonarr, entry_id: str, enabled_default: bool = True, icon: str, key: str, name: str, unit_of_measurement: Optional[str] = None, ) -> None: """Initialize Sonarr sensor.""" self._unit_of_measurement = unit_of_measurement self._key = key self._unique_id = f"{entry_id}_{key}" self.last_update_success = False super().__init__( sonarr=sonarr, entry_id=entry_id, device_id=entry_id, name=name, icon=icon, enabled_default=enabled_default, ) @property def unique_id(self) -> str: """Return the unique ID for this sensor.""" return self._unique_id @property def available(self) -> bool: """Return sensor availability.""" return self.last_update_success @property def unit_of_measurement(self) -> str: """Return the unit this state is expressed in.""" return self._unit_of_measurement class SonarrCommandsSensor(SonarrSensor): """Defines a Sonarr Commands sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Commands sensor.""" self._commands = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:code-braces", key="commands", name=f"{sonarr.app.info.app_name} Commands", unit_of_measurement="Commands", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._commands = await self.sonarr.commands() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for command in self._commands: attrs[command.name] = 
command.state return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._commands) class SonarrDiskspaceSensor(SonarrSensor): """Defines a Sonarr Disk Space sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Disk Space sensor.""" self._disks = [] self._total_free = 0 super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:harddisk", key="diskspace", name=f"{sonarr.app.info.app_name} Disk Space", unit_of_measurement=DATA_GIGABYTES, enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" app = await self.sonarr.update() self._disks = app.disks self._total_free = sum([disk.free for disk in self._disks]) @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for disk in self._disks: free = disk.free / 1024 ** 3 total = disk.total / 1024 ** 3 usage = free / total * 100 attrs[ disk.path ] = f"{free:.2f}/{total:.2f}{self._unit_of_measurement} ({usage:.2f}%)" return attrs @property def state(self) -> str: """Return the state of the sensor.""" free = self._total_free / 1024 ** 3 return f"{free:.2f}" class SonarrQueueSensor(SonarrSensor): """Defines a Sonarr Queue sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Queue sensor.""" self._queue = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:download", key="queue", name=f"{sonarr.app.info.app_name} Queue", unit_of_measurement="Episodes", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._queue = await self.sonarr.queue() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for item in self._queue: remaining = 1 if item.size == 0 else item.size_remaining / item.size remaining_pct = 100 * (1 - remaining) name = f"{item.episode.series.title} {item.episode.identifier}" attrs[name] = f"{remaining_pct:.2f}%" return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._queue) class SonarrSeriesSensor(SonarrSensor): """Defines a Sonarr Series sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Series sensor.""" self._items = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="series", name=f"{sonarr.app.info.app_name} Shows", unit_of_measurement="Series", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._items = await self.sonarr.series() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for item in self._items: attrs[item.series.title] = f"{item.downloaded}/{item.episodes} Episodes" return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._items) class SonarrUpcomingSensor(SonarrSensor): """Defines a Sonarr Upcoming sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str, days: int = 1) -> None: """Initialize Sonarr Upcoming sensor.""" self._days = days self._upcoming = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="upcoming", name=f"{sonarr.app.info.app_name} Upcoming", unit_of_measurement="Episodes", ) async def async_added_to_hass(self): """Listen for signals.""" await super().async_added_to_hass() 
self.async_on_remove( async_dispatcher_connect( self.hass, f"sonarr.{self._entry_id}.entry_options_update", self.async_update_entry_options, ) ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" local = dt_util.start_of_local_day().replace(microsecond=0) start = dt_util.as_utc(local) end = start + timedelta(days=self._days) self._upcoming = await self.sonarr.calendar( start=start.isoformat(), end=end.isoformat() ) async def async_update_entry_options(self, options: dict) -> None: """Update sensor settings when config entry options are update.""" self._days = options[CONF_UPCOMING_DAYS] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for episode in self._upcoming: attrs[episode.series.title] = episode.identifier return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._upcoming) class SonarrWantedSensor(SonarrSensor): """Defines a Sonarr Wanted sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str, max_items: int = 10) -> None: """Initialize Sonarr Wanted sensor.""" self._max_items = max_items self._results = None self._total: Optional[int] = None super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="wanted", name=f"{sonarr.app.info.app_name} Wanted", unit_of_measurement="Episodes", enabled_default=False, ) async def async_added_to_hass(self): """Listen for signals.""" await super().async_added_to_hass() self.async_on_remove( async_dispatcher_connect( self.hass, f"sonarr.{self._entry_id}.entry_options_update", self.async_update_entry_options, ) ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._results = await self.sonarr.wanted(page_size=self._max_items) self._total = self._results.total async def async_update_entry_options(self, options: dict) -> None: """Update sensor settings when config entry options are update.""" self._max_items = options[CONF_WANTED_MAX_ITEMS] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} if self._results is not None: for episode in self._results.episodes: name = f"{episode.series.title} {episode.identifier}" attrs[name] = episode.airdate return attrs @property def state(self) -> Optional[int]: """Return the state of the sensor.""" return self._total
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/sonarr/sensor.py
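One way to use such a row (a sketch under the assumption that `row` is a dict with the columns above, as in the earlier loading example) is to write the code and test back to their repository-relative paths so the test can be run against the source file:

# Sketch: materialize one row's code and test files under a local checkout.
# `repo_root` is assumed to point at a working copy of row["repo_name"].
from pathlib import Path


def materialize_row(row: dict, repo_root: Path) -> None:
    for path_key, text_key in (("code_path", "input"), ("test_path", "output")):
        target = repo_root / row[path_key]
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(row[text_key], encoding="utf-8")
    # The test can then be invoked with pytest, e.g. `pytest <repo_root>/<test_path>`.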
"""Config flow to configure Coolmaster.""" from pycoolmasternet_async import CoolMasterNet import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_HOST, CONF_PORT # pylint: disable=unused-import from .const import AVAILABLE_MODES, CONF_SUPPORTED_MODES, DEFAULT_PORT, DOMAIN MODES_SCHEMA = {vol.Required(mode, default=True): bool for mode in AVAILABLE_MODES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str, **MODES_SCHEMA}) async def _validate_connection(hass: core.HomeAssistant, host): cool = CoolMasterNet(host, DEFAULT_PORT) units = await cool.status() return bool(units) class CoolmasterConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Coolmaster config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @core.callback def _async_get_entry(self, data): supported_modes = [ key for (key, value) in data.items() if key in AVAILABLE_MODES and value ] return self.async_create_entry( title=data[CONF_HOST], data={ CONF_HOST: data[CONF_HOST], CONF_PORT: DEFAULT_PORT, CONF_SUPPORTED_MODES: supported_modes, }, ) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if user_input is None: return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA) errors = {} host = user_input[CONF_HOST] try: result = await _validate_connection(self.hass, host) if not result: errors["base"] = "no_units" except (OSError, ConnectionRefusedError, TimeoutError): errors["base"] = "cannot_connect" if errors: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) return self._async_get_entry(user_input)
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/coolmaster/config_flow.py
"""Reproduce an Remote state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATE_ON, STATE_OFF} async def _async_reproduce_state( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in VALID_STATES: _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # Return if we are already at the right state. if cur_state.state == state.state: return service_data = {ATTR_ENTITY_ID: state.entity_id} if state.state == STATE_ON: service = SERVICE_TURN_ON elif state.state == STATE_OFF: service = SERVICE_TURN_OFF await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce Remote states.""" await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/remote/reproduce_state.py
"""Support for powering relays in a DoorBird video doorbell.""" import datetime from homeassistant.components.switch import SwitchEntity import homeassistant.util.dt as dt_util from .const import DOMAIN, DOOR_STATION, DOOR_STATION_INFO from .entity import DoorBirdEntity IR_RELAY = "__ir_light__" async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the DoorBird switch platform.""" entities = [] config_entry_id = config_entry.entry_id doorstation = hass.data[DOMAIN][config_entry_id][DOOR_STATION] doorstation_info = hass.data[DOMAIN][config_entry_id][DOOR_STATION_INFO] relays = doorstation_info["RELAYS"] relays.append(IR_RELAY) for relay in relays: switch = DoorBirdSwitch(doorstation, doorstation_info, relay) entities.append(switch) async_add_entities(entities) class DoorBirdSwitch(DoorBirdEntity, SwitchEntity): """A relay in a DoorBird device.""" def __init__(self, doorstation, doorstation_info, relay): """Initialize a relay in a DoorBird device.""" super().__init__(doorstation, doorstation_info) self._doorstation = doorstation self._relay = relay self._state = False self._assume_off = datetime.datetime.min if relay == IR_RELAY: self._time = datetime.timedelta(minutes=5) else: self._time = datetime.timedelta(seconds=5) self._unique_id = f"{self._mac_addr}_{self._relay}" @property def unique_id(self): """Switch unique id.""" return self._unique_id @property def name(self): """Return the name of the switch.""" if self._relay == IR_RELAY: return f"{self._doorstation.name} IR" return f"{self._doorstation.name} Relay {self._relay}" @property def icon(self): """Return the icon to display.""" return "mdi:lightbulb" if self._relay == IR_RELAY else "mdi:dip-switch" @property def is_on(self): """Get the assumed state of the relay.""" return self._state def turn_on(self, **kwargs): """Power the relay.""" if self._relay == IR_RELAY: self._state = self._doorstation.device.turn_light_on() else: self._state = self._doorstation.device.energize_relay(self._relay) now = dt_util.utcnow() self._assume_off = now + self._time def turn_off(self, **kwargs): """Turn off the relays is not needed. They are time-based.""" raise NotImplementedError("DoorBird relays cannot be manually turned off.") async def async_update(self): """Wait for the correct amount of assumed time to pass.""" if self._state and self._assume_off <= dt_util.utcnow(): self._state = False self._assume_off = datetime.datetime.min
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/doorbird/switch.py
"""Support for an Intergas heater via an InComfort/InTouch Lan2RF gateway.""" from typing import Any, Dict, Optional from homeassistant.components.binary_sensor import ( DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorEntity, ) from . import DOMAIN, IncomfortChild async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up an InComfort/InTouch binary_sensor device.""" if discovery_info is None: return client = hass.data[DOMAIN]["client"] heaters = hass.data[DOMAIN]["heaters"] async_add_entities([IncomfortFailed(client, h) for h in heaters]) class IncomfortFailed(IncomfortChild, BinarySensorEntity): """Representation of an InComfort Failed sensor.""" def __init__(self, client, heater) -> None: """Initialize the binary sensor.""" super().__init__() self._unique_id = f"{heater.serial_no}_failed" self.entity_id = f"{BINARY_SENSOR_DOMAIN}.{DOMAIN}_failed" self._name = "Boiler Fault" self._client = client self._heater = heater @property def is_on(self) -> bool: """Return the status of the sensor.""" return self._heater.status["is_failed"] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the device state attributes.""" return {"fault_code": self._heater.status["fault_code"]}
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/incomfort/binary_sensor.py
"""Manufacturer specific channels module for Zigbee Home Automation.""" from homeassistant.core import callback from .. import registries from ..const import ( ATTR_ATTRIBUTE_ID, ATTR_ATTRIBUTE_NAME, ATTR_VALUE, REPORT_CONFIG_ASAP, REPORT_CONFIG_MAX_INT, REPORT_CONFIG_MIN_INT, SIGNAL_ATTR_UPDATED, UNKNOWN, ) from .base import ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(registries.SMARTTHINGS_HUMIDITY_CLUSTER) class SmartThingsHumidity(ZigbeeChannel): """Smart Things Humidity channel.""" REPORT_CONFIG = [ { "attr": "measured_value", "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), } ] @registries.CHANNEL_ONLY_CLUSTERS.register(0xFD00) @registries.ZIGBEE_CHANNEL_REGISTRY.register(0xFD00) class OsramButton(ZigbeeChannel): """Osram button channel.""" REPORT_CONFIG = [] @registries.CHANNEL_ONLY_CLUSTERS.register(registries.PHILLIPS_REMOTE_CLUSTER) @registries.ZIGBEE_CHANNEL_REGISTRY.register(registries.PHILLIPS_REMOTE_CLUSTER) class PhillipsRemote(ZigbeeChannel): """Phillips remote channel.""" REPORT_CONFIG = [] @registries.CHANNEL_ONLY_CLUSTERS.register(0xFCC0) @registries.ZIGBEE_CHANNEL_REGISTRY.register(0xFCC0) class OppleRemote(ZigbeeChannel): """Opple button channel.""" REPORT_CONFIG = [] @registries.ZIGBEE_CHANNEL_REGISTRY.register( registries.SMARTTHINGS_ACCELERATION_CLUSTER ) class SmartThingsAcceleration(ZigbeeChannel): """Smart Things Acceleration channel.""" REPORT_CONFIG = [ {"attr": "acceleration", "config": REPORT_CONFIG_ASAP}, {"attr": "x_axis", "config": REPORT_CONFIG_ASAP}, {"attr": "y_axis", "config": REPORT_CONFIG_ASAP}, {"attr": "z_axis", "config": REPORT_CONFIG_ASAP}, ] @callback def attribute_updated(self, attrid, value): """Handle attribute updates on this cluster.""" if attrid == self.value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, self._cluster.attributes.get(attrid, [UNKNOWN])[0], value, ) return self.zha_send_event( SIGNAL_ATTR_UPDATED, { ATTR_ATTRIBUTE_ID: attrid, ATTR_ATTRIBUTE_NAME: self._cluster.attributes.get(attrid, [UNKNOWN])[0], ATTR_VALUE: value, }, )
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/zha/core/channels/manufacturerspecific.py
"""Signal handling related helpers.""" import logging import signal import sys from types import FrameType from homeassistant.const import RESTART_EXIT_CODE from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass _LOGGER = logging.getLogger(__name__) @callback @bind_hass def async_register_signal_handling(hass: HomeAssistant) -> None: """Register system signal handler for core.""" if sys.platform != "win32": @callback def async_signal_handle(exit_code: int) -> None: """Wrap signal handling. * queue call to shutdown task * re-instate default handler """ hass.loop.remove_signal_handler(signal.SIGTERM) hass.loop.remove_signal_handler(signal.SIGINT) hass.async_create_task(hass.async_stop(exit_code)) try: hass.loop.add_signal_handler(signal.SIGTERM, async_signal_handle, 0) except ValueError: _LOGGER.warning("Could not bind to SIGTERM") try: hass.loop.add_signal_handler(signal.SIGINT, async_signal_handle, 0) except ValueError: _LOGGER.warning("Could not bind to SIGINT") try: hass.loop.add_signal_handler( signal.SIGHUP, async_signal_handle, RESTART_EXIT_CODE ) except ValueError: _LOGGER.warning("Could not bind to SIGHUP") else: old_sigterm = None old_sigint = None @callback def async_signal_handle(exit_code: int, frame: FrameType) -> None: """Wrap signal handling. * queue call to shutdown task * re-instate default handler """ signal.signal(signal.SIGTERM, old_sigterm) signal.signal(signal.SIGINT, old_sigint) hass.async_create_task(hass.async_stop(exit_code)) try: old_sigterm = signal.signal(signal.SIGTERM, async_signal_handle) except ValueError: _LOGGER.warning("Could not bind to SIGTERM") try: old_sigint = signal.signal(signal.SIGINT, async_signal_handle) except ValueError: _LOGGER.warning("Could not bind to SIGINT")
"""Tests for the Device Registry.""" import asyncio import pytest from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CoreState, callback from homeassistant.helpers import device_registry, entity_registry from tests.async_mock import patch from tests.common import MockConfigEntry, flush_store, mock_device_registry @pytest.fixture def registry(hass): """Return an empty, loaded, registry.""" return mock_device_registry(hass) @pytest.fixture def update_events(hass): """Capture update events.""" events = [] @callback def async_capture(event): events.append(event.data) hass.bus.async_listen(device_registry.EVENT_DEVICE_REGISTRY_UPDATED, async_capture) return events async def test_get_or_create_returns_same_entry(hass, registry, update_events): """Make sure we do not duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, sw_version="sw-version", name="name", manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "11:22:33:66:77:88")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) assert len(registry.devices) == 1 assert entry.id == entry2.id assert entry.id == entry3.id assert entry.identifiers == {("bridgeid", "0123")} assert entry3.manufacturer == "manufacturer" assert entry3.model == "model" assert entry3.name == "name" assert entry3.sw_version == "sw-version" await hass.async_block_till_done() # Only 2 update events. The third entry did not generate any changes. 
assert len(update_events) == 2 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry.id async def test_requirement_for_identifier_or_connection(registry): """Make sure we do require some descriptor of device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers=set(), manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="1234", connections=set(), identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="1234", connections=set(), identifiers=set(), manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry assert entry2 assert entry3 is None async def test_multiple_config_entries(registry): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 1 assert entry.id == entry2.id assert entry.id == entry3.id assert entry2.config_entries == {"123", "456"} async def test_loading_from_storage(hass, hass_storage): """Test loading stored devices on start.""" hass_storage[device_registry.STORAGE_KEY] = { "version": device_registry.STORAGE_VERSION, "data": { "devices": [ { "config_entries": ["1234"], "connections": [["Zigbee", "01.23.45.67.89"]], "id": "abcdefghijklm", "identifiers": [["serial", "12:34:56:AB:CD:EF"]], "manufacturer": "manufacturer", "model": "model", "name": "name", "sw_version": "version", "entry_type": "service", "area_id": "12345A", "name_by_user": "Test Friendly Name", } ], "deleted_devices": [ { "config_entries": ["1234"], "connections": [["Zigbee", "23.45.67.89.01"]], "id": "bcdefghijklmn", "identifiers": [["serial", "34:56:AB:CD:EF:12"]], } ], }, } registry = await device_registry.async_get_registry(hass) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 1 entry = registry.async_get_or_create( config_entry_id="1234", connections={("Zigbee", "01.23.45.67.89")}, identifiers={("serial", "12:34:56:AB:CD:EF")}, manufacturer="manufacturer", model="model", ) assert entry.id == "abcdefghijklm" assert entry.area_id == "12345A" assert entry.name_by_user == "Test Friendly Name" assert entry.entry_type == "service" assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) assert isinstance(entry.identifiers, set) entry = registry.async_get_or_create( config_entry_id="1234", connections={("Zigbee", "23.45.67.89.01")}, identifiers={("serial", "34:56:AB:CD:EF:12")}, manufacturer="manufacturer", model="model", ) assert entry.id == "bcdefghijklmn" assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) assert isinstance(entry.identifiers, set) async 
def test_removing_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} registry.async_clear_config_entry("123") entry = registry.async_get_device({("bridgeid", "0123")}, set()) entry3_removed = registry.async_get_device({("bridgeid", "4567")}, set()) assert entry.config_entries == {"456"} assert entry3_removed is None await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "update" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id async def test_deleted_device_removing_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} registry.async_remove_device(entry.id) registry.async_remove_device(entry3.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 2 await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "remove" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id registry.async_clear_config_entry("123") assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 registry.async_clear_config_entry("456") assert 
len(registry.devices) == 0 assert len(registry.deleted_devices) == 0 # No event when a deleted device is purged await hass.async_block_till_done() assert len(update_events) == 5 # Re-add, expect new device id entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id != entry2.id async def test_removing_area_id(registry): """Make sure we can clear area id.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry_w_area = registry.async_update_device(entry.id, area_id="12345A") registry.async_clear_area_id("12345A") entry_wo_area = registry.async_get_device({("bridgeid", "0123")}, set()) assert not entry_wo_area.area_id assert entry_w_area != entry_wo_area async def test_deleted_device_removing_area_id(registry): """Make sure we can clear area id of deleted device.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry_w_area = registry.async_update_device(entry.id, area_id="12345A") registry.async_remove_device(entry.id) registry.async_clear_area_id("12345A") entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id == entry2.id entry_wo_area = registry.async_get_device({("bridgeid", "0123")}, set()) assert not entry_wo_area.area_id assert entry_w_area != entry_wo_area async def test_specifying_via_device_create(registry): """Test specifying a via_device and updating.""" via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", ) light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id == via.id async def test_specifying_via_device_update(registry): """Test specifying a via_device and updating.""" light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id is None via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", ) light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id == via.id async def test_loading_saving_data(hass, registry): """Test that we load/save data correctly.""" orig_via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", name="Original Name", sw_version="Orig SW 1", 
entry_type="device", ) orig_light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) orig_light2 = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "789")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) registry.async_remove_device(orig_light2.id) assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 1 orig_via = registry.async_update_device( orig_via.id, area_id="mock-area-id", name_by_user="mock-name-by-user" ) # Now load written data in new registry registry2 = device_registry.DeviceRegistry(hass) await flush_store(registry._store) await registry2.async_load() # Ensure same order assert list(registry.devices) == list(registry2.devices) assert list(registry.deleted_devices) == list(registry2.deleted_devices) new_via = registry2.async_get_device({("hue", "0123")}, set()) new_light = registry2.async_get_device({("hue", "456")}, set()) assert orig_via == new_via assert orig_light == new_light async def test_no_unnecessary_changes(registry): """Make sure we do not consider devices changes.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, identifiers={("hue", "456"), ("bla", "123")}, ) with patch( "homeassistant.helpers.device_registry.DeviceRegistry.async_schedule_save" ) as mock_save: entry2 = registry.async_get_or_create( config_entry_id="1234", identifiers={("hue", "456")} ) assert entry.id == entry2.id assert len(mock_save.mock_calls) == 0 async def test_format_mac(registry): """Make sure we normalize mac addresses.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) for mac in ["123456ABCDEF", "123456abcdef", "12:34:56:ab:cd:ef", "1234.56ab.cdef"]: test_entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, mac)}, ) assert test_entry.id == entry.id, mac assert test_entry.connections == { (device_registry.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef") } # This should not raise for invalid in [ "invalid_mac", "123456ABCDEFG", # 1 extra char "12:34:56:ab:cdef", # not enough : "12:34:56:ab:cd:e:f", # too many : "1234.56abcdef", # not enough . "123.456.abc.def", # too many . 
]: invalid_mac_entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, invalid)}, ) assert list(invalid_mac_entry.connections)[0][1] == invalid async def test_update(registry): """Verify that we can update some attributes of a device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "456"), ("bla", "123")}, ) new_identifiers = {("hue", "654"), ("bla", "321")} assert not entry.area_id assert not entry.name_by_user with patch.object(registry, "async_schedule_save") as mock_save: updated_entry = registry.async_update_device( entry.id, area_id="12345A", manufacturer="Test Producer", model="Test Model", name_by_user="Test Friendly Name", new_identifiers=new_identifiers, via_device_id="98765B", ) assert mock_save.call_count == 1 assert updated_entry != entry assert updated_entry.area_id == "12345A" assert updated_entry.manufacturer == "Test Producer" assert updated_entry.model == "Test Model" assert updated_entry.name_by_user == "Test Friendly Name" assert updated_entry.identifiers == new_identifiers assert updated_entry.via_device_id == "98765B" assert registry.async_get_device({("hue", "456")}, {}) is None assert registry.async_get_device({("bla", "123")}, {}) is None assert registry.async_get_device({("hue", "654")}, {}) == updated_entry assert registry.async_get_device({("bla", "321")}, {}) == updated_entry assert ( registry.async_get_device( {}, {(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")} ) == updated_entry ) assert registry.async_get(updated_entry.id) is not None async def test_update_remove_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} updated_entry = registry.async_update_device( entry2.id, remove_config_entry_id="123" ) removed_entry = registry.async_update_device( entry3.id, remove_config_entry_id="123" ) assert updated_entry.config_entries == {"456"} assert removed_entry is None removed_entry = registry.async_get_device({("bridgeid", "4567")}, set()) assert removed_entry is None await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "update" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id async def test_loading_race_condition(hass): 
"""Test only one storage load called when concurrent loading occurred .""" with patch( "homeassistant.helpers.device_registry.DeviceRegistry.async_load" ) as mock_load: results = await asyncio.gather( device_registry.async_get_registry(hass), device_registry.async_get_registry(hass), ) mock_load.assert_called_once_with() assert results[0] == results[1] async def test_update_sw_version(registry): """Verify that we can update software version of a device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bla", "123")}, ) assert not entry.sw_version sw_version = "0x20020263" with patch.object(registry, "async_schedule_save") as mock_save: updated_entry = registry.async_update_device(entry.id, sw_version=sw_version) assert mock_save.call_count == 1 assert updated_entry != entry assert updated_entry.sw_version == sw_version async def test_cleanup_device_registry(hass, registry): """Test cleanup works.""" config_entry = MockConfigEntry(domain="hue") config_entry.add_to_hass(hass) d1 = registry.async_get_or_create( identifiers={("hue", "d1")}, config_entry_id=config_entry.entry_id ) registry.async_get_or_create( identifiers={("hue", "d2")}, config_entry_id=config_entry.entry_id ) d3 = registry.async_get_or_create( identifiers={("hue", "d3")}, config_entry_id=config_entry.entry_id ) registry.async_get_or_create( identifiers={("something", "d4")}, config_entry_id="non_existing" ) ent_reg = await entity_registry.async_get_registry(hass) ent_reg.async_get_or_create("light", "hue", "e1", device_id=d1.id) ent_reg.async_get_or_create("light", "hue", "e2", device_id=d1.id) ent_reg.async_get_or_create("light", "hue", "e3", device_id=d3.id) device_registry.async_cleanup(hass, registry, ent_reg) assert registry.async_get_device({("hue", "d1")}, set()) is not None assert registry.async_get_device({("hue", "d2")}, set()) is not None assert registry.async_get_device({("hue", "d3")}, set()) is not None assert registry.async_get_device({("something", "d4")}, set()) is None async def test_cleanup_startup(hass): """Test we run a cleanup on startup.""" hass.state = CoreState.not_running await device_registry.async_get_registry(hass) with patch( "homeassistant.helpers.device_registry.Debouncer.async_call" ) as mock_call: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert len(mock_call.mock_calls) == 1 async def test_cleanup_entity_registry_change(hass): """Test we run a cleanup when entity registry changes.""" await device_registry.async_get_registry(hass) ent_reg = await entity_registry.async_get_registry(hass) with patch( "homeassistant.helpers.device_registry.Debouncer.async_call" ) as mock_call: entity = ent_reg.async_get_or_create("light", "hue", "e1") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 0 # Normal update does not trigger ent_reg.async_update_entity(entity.entity_id, name="updated") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 0 # Device ID update triggers ent_reg.async_get_or_create("light", "hue", "e1", device_id="bla") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 1 # Removal also triggers ent_reg.async_remove(entity.entity_id) await hass.async_block_till_done() assert len(mock_call.mock_calls) == 2 async def test_restore_device(hass, registry, update_events): """Make sure device id is stable.""" entry = registry.async_get_or_create( config_entry_id="123", 
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 0 registry.async_remove_device(entry.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id == entry3.id assert entry.id != entry2.id assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 assert isinstance(entry3.config_entries, set) assert isinstance(entry3.connections, set) assert isinstance(entry3.identifiers, set) await hass.async_block_till_done() assert len(update_events) == 4 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "remove" assert update_events[1]["device_id"] == entry.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry2.id assert update_events[3]["action"] == "create" assert update_events[3]["device_id"] == entry3.id async def test_restore_simple_device(hass, registry, update_events): """Make sure device id is stable.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, ) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 0 registry.async_remove_device(entry.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, ) assert entry.id == entry3.id assert entry.id != entry2.id assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 await hass.async_block_till_done() assert len(update_events) == 4 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "remove" assert update_events[1]["device_id"] == entry.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry2.id assert update_events[3]["action"] == "create" assert update_events[3]["device_id"] == entry3.id
mKeRix/home-assistant
tests/helpers/test_device_registry.py
homeassistant/helpers/signal.py
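The test_format_mac assertions above expect every recognizable MAC spelling ("123456ABCDEF", "1234.56ab.cdef", colon-separated upper or lower case) to collapse to the lowercase colon-separated form, while anything unparseable is stored untouched. As a minimal standalone sketch of that contract only — not necessarily how Home Assistant's own normalization helper is implemented — a normalizer satisfying those assertions could look like this:

import re


def format_mac_sketch(mac: str) -> str:
    """Illustrative normalizer matching what test_format_mac asserts."""
    candidate = mac
    if re.fullmatch(r"([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}", candidate):
        # Already colon-separated; just lowercase it.
        return candidate.lower()
    if re.fullmatch(r"([0-9A-Fa-f]{2}-){5}[0-9A-Fa-f]{2}", candidate):
        candidate = candidate.replace("-", "")
    elif re.fullmatch(r"([0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4}", candidate):
        candidate = candidate.replace(".", "")
    if re.fullmatch(r"[0-9A-Fa-f]{12}", candidate):
        # Re-join as lowercase pairs separated by colons.
        return ":".join(candidate.lower()[i : i + 2] for i in range(0, 12, 2))
    # Not interpretable as a MAC address: keep the original value unchanged.
    return mac


assert format_mac_sketch("1234.56ab.cdef") == "12:34:56:ab:cd:ef"
assert format_mac_sketch("12:34:56:AB:CD:EF") == "12:34:56:ab:cd:ef"
assert format_mac_sketch("invalid_mac") == "invalid_mac"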
"""Template helper methods for rendering strings with Home Assistant data.""" import base64 import collections.abc from datetime import datetime from functools import wraps import json import logging import math import random import re from typing import Any, Dict, Iterable, List, Optional, Union from urllib.parse import urlencode as urllib_urlencode import jinja2 from jinja2 import contextfilter, contextfunction from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2.utils import Namespace # type: ignore from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_UNIT_OF_MEASUREMENT, LENGTH_METERS, MATCH_ALL, STATE_UNKNOWN, ) from homeassistant.core import State, callback, split_entity_id, valid_entity_id from homeassistant.exceptions import TemplateError from homeassistant.helpers import location as loc_helper from homeassistant.helpers.typing import HomeAssistantType, TemplateVarsType from homeassistant.loader import bind_hass from homeassistant.util import convert, dt as dt_util, location as loc_util from homeassistant.util.async_ import run_callback_threadsafe # mypy: allow-untyped-calls, allow-untyped-defs # mypy: no-check-untyped-defs, no-warn-return-any _LOGGER = logging.getLogger(__name__) _SENTINEL = object() DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S" _RENDER_INFO = "template.render_info" _ENVIRONMENT = "template.environment" _RE_NONE_ENTITIES = re.compile(r"distance\(|closest\(", re.I | re.M) _RE_GET_ENTITIES = re.compile( r"(?:(?:states\.|(?P<func>is_state|is_state_attr|state_attr|states|expand)" r"\((?:[\ \'\"]?))(?P<entity_id>[\w]+\.[\w]+)|(?P<variable>[\w]+))", re.I | re.M, ) _RE_JINJA_DELIMITERS = re.compile(r"\{%|\{\{") @bind_hass def attach(hass: HomeAssistantType, obj: Any) -> None: """Recursively attach hass to all template instances in list and dict.""" if isinstance(obj, list): for child in obj: attach(hass, child) elif isinstance(obj, dict): for child in obj.values(): attach(hass, child) elif isinstance(obj, Template): obj.hass = hass def render_complex(value: Any, variables: TemplateVarsType = None) -> Any: """Recursive template creator helper function.""" if isinstance(value, list): return [render_complex(item, variables) for item in value] if isinstance(value, dict): return {key: render_complex(item, variables) for key, item in value.items()} if isinstance(value, Template): return value.async_render(variables) return value def extract_entities( hass: HomeAssistantType, template: Optional[str], variables: Optional[Dict[str, Any]] = None, ) -> Union[str, List[str]]: """Extract all entities for state_changed listener from template string.""" if template is None or _RE_JINJA_DELIMITERS.search(template) is None: return [] if _RE_NONE_ENTITIES.search(template): return MATCH_ALL extraction_final = [] for result in _RE_GET_ENTITIES.finditer(template): if ( result.group("entity_id") == "trigger.entity_id" and variables and "trigger" in variables and "entity_id" in variables["trigger"] ): extraction_final.append(variables["trigger"]["entity_id"]) elif result.group("entity_id"): if result.group("func") == "expand": for entity in expand(hass, result.group("entity_id")): extraction_final.append(entity.entity_id) extraction_final.append(result.group("entity_id")) if ( variables and result.group("variable") in variables and isinstance(variables[result.group("variable")], str) and valid_entity_id(variables[result.group("variable")]) ): extraction_final.append(variables[result.group("variable")]) if extraction_final: return 
list(set(extraction_final)) return MATCH_ALL def _true(arg: Any) -> bool: return True class RenderInfo: """Holds information about a template render.""" def __init__(self, template): """Initialise.""" self.template = template # Will be set sensibly once frozen. self.filter_lifecycle = _true self._result = None self._exception = None self._all_states = False self._domains = [] self._entities = [] def filter(self, entity_id: str) -> bool: """Template should re-render if the state changes.""" return entity_id in self._entities def _filter_lifecycle(self, entity_id: str) -> bool: """Template should re-render if the state changes.""" return ( split_entity_id(entity_id)[0] in self._domains or entity_id in self._entities ) @property def result(self) -> str: """Results of the template computation.""" if self._exception is not None: raise self._exception return self._result def _freeze(self) -> None: self._entities = frozenset(self._entities) if self._all_states: # Leave lifecycle_filter as True del self._domains elif not self._domains: del self._domains self.filter_lifecycle = self.filter else: self._domains = frozenset(self._domains) self.filter_lifecycle = self._filter_lifecycle class Template: """Class to hold a template and manage caching and rendering.""" def __init__(self, template, hass=None): """Instantiate a template.""" if not isinstance(template, str): raise TypeError("Expected template to be a string") self.template: str = template self._compiled_code = None self._compiled = None self.hass = hass @property def _env(self): if self.hass is None: return _NO_HASS_ENV ret = self.hass.data.get(_ENVIRONMENT) if ret is None: ret = self.hass.data[_ENVIRONMENT] = TemplateEnvironment(self.hass) return ret def ensure_valid(self): """Return if template is valid.""" if self._compiled_code is not None: return try: self._compiled_code = self._env.compile(self.template) except jinja2.exceptions.TemplateSyntaxError as err: raise TemplateError(err) def extract_entities( self, variables: Optional[Dict[str, Any]] = None ) -> Union[str, List[str]]: """Extract all entities for state_changed listener.""" return extract_entities(self.hass, self.template, variables) def render(self, variables: TemplateVarsType = None, **kwargs: Any) -> str: """Render given template.""" if variables is not None: kwargs.update(variables) return run_callback_threadsafe( self.hass.loop, self.async_render, kwargs ).result() @callback def async_render(self, variables: TemplateVarsType = None, **kwargs: Any) -> str: """Render given template. This method must be run in the event loop. """ compiled = self._compiled or self._ensure_compiled() if variables is not None: kwargs.update(variables) try: return compiled.render(kwargs).strip() except jinja2.TemplateError as err: raise TemplateError(err) @callback def async_render_to_info( self, variables: TemplateVarsType = None, **kwargs: Any ) -> RenderInfo: """Render the template and collect an entity filter.""" assert self.hass and _RENDER_INFO not in self.hass.data render_info = self.hass.data[_RENDER_INFO] = RenderInfo(self) # pylint: disable=protected-access try: render_info._result = self.async_render(variables, **kwargs) except TemplateError as ex: render_info._exception = ex finally: del self.hass.data[_RENDER_INFO] render_info._freeze() return render_info def render_with_possible_json_value(self, value, error_value=_SENTINEL): """Render template with value exposed. If valid JSON will expose value_json too. 
""" return run_callback_threadsafe( self.hass.loop, self.async_render_with_possible_json_value, value, error_value, ).result() @callback def async_render_with_possible_json_value( self, value, error_value=_SENTINEL, variables=None ): """Render template with value exposed. If valid JSON will expose value_json too. This method must be run in the event loop. """ if self._compiled is None: self._ensure_compiled() variables = dict(variables or {}) variables["value"] = value try: variables["value_json"] = json.loads(value) except (ValueError, TypeError): pass try: return self._compiled.render(variables).strip() except jinja2.TemplateError as ex: if error_value is _SENTINEL: _LOGGER.error( "Error parsing value: %s (value: %s, template: %s)", ex, value, self.template, ) return value if error_value is _SENTINEL else error_value def _ensure_compiled(self): """Bind a template to a specific hass instance.""" self.ensure_valid() assert self.hass is not None, "hass variable not set on template" env = self._env self._compiled = jinja2.Template.from_code( env, self._compiled_code, env.globals, None ) return self._compiled def __eq__(self, other): """Compare template with another.""" return ( self.__class__ == other.__class__ and self.template == other.template and self.hass == other.hass ) def __hash__(self) -> int: """Hash code for template.""" return hash(self.template) def __repr__(self) -> str: """Representation of Template.""" return 'Template("' + self.template + '")' class AllStates: """Class to expose all HA states as attributes.""" def __init__(self, hass): """Initialize all states.""" self._hass = hass def __getattr__(self, name): """Return the domain state.""" if "." in name: if not valid_entity_id(name): raise TemplateError(f"Invalid entity ID '{name}'") return _get_state(self._hass, name) if not valid_entity_id(f"{name}.entity"): raise TemplateError(f"Invalid domain name '{name}'") return DomainStates(self._hass, name) def _collect_all(self) -> None: render_info = self._hass.data.get(_RENDER_INFO) if render_info is not None: # pylint: disable=protected-access render_info._all_states = True def __iter__(self): """Return all states.""" self._collect_all() return iter( _wrap_state(self._hass, state) for state in sorted( self._hass.states.async_all(), key=lambda state: state.entity_id ) ) def __len__(self) -> int: """Return number of states.""" self._collect_all() return len(self._hass.states.async_entity_ids()) def __call__(self, entity_id): """Return the states.""" state = _get_state(self._hass, entity_id) return STATE_UNKNOWN if state is None else state.state def __repr__(self) -> str: """Representation of All States.""" return "<template AllStates>" class DomainStates: """Class to expose a specific HA domain as attributes.""" def __init__(self, hass, domain): """Initialize the domain states.""" self._hass = hass self._domain = domain def __getattr__(self, name): """Return the states.""" entity_id = f"{self._domain}.{name}" if not valid_entity_id(entity_id): raise TemplateError(f"Invalid entity ID '{entity_id}'") return _get_state(self._hass, entity_id) def _collect_domain(self) -> None: entity_collect = self._hass.data.get(_RENDER_INFO) if entity_collect is not None: # pylint: disable=protected-access entity_collect._domains.append(self._domain) def __iter__(self): """Return the iteration over all the states.""" self._collect_domain() return iter( sorted( ( _wrap_state(self._hass, state) for state in self._hass.states.async_all() if state.domain == self._domain ), key=lambda state: 
state.entity_id, ) ) def __len__(self) -> int: """Return number of states.""" self._collect_domain() return len(self._hass.states.async_entity_ids(self._domain)) def __repr__(self) -> str: """Representation of Domain States.""" return f"<template DomainStates('{self._domain}')>" class TemplateState(State): """Class to represent a state object in a template.""" # Inheritance is done so functions that check against State keep working # pylint: disable=super-init-not-called def __init__(self, hass, state): """Initialize template state.""" self._hass = hass self._state = state def _access_state(self): state = object.__getattribute__(self, "_state") hass = object.__getattribute__(self, "_hass") _collect_state(hass, state.entity_id) return state @property def state_with_unit(self) -> str: """Return the state concatenated with the unit if available.""" state = object.__getattribute__(self, "_access_state")() unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) if unit is None: return state.state return f"{state.state} {unit}" def __getattribute__(self, name): """Return an attribute of the state.""" # This one doesn't count as an access of the state # since we either found it by looking direct for the ID # or got it off an iterator. if name == "entity_id" or name in object.__dict__: state = object.__getattribute__(self, "_state") return getattr(state, name) if name in TemplateState.__dict__: return object.__getattribute__(self, name) state = object.__getattribute__(self, "_access_state")() return getattr(state, name) def __repr__(self) -> str: """Representation of Template State.""" state = object.__getattribute__(self, "_access_state")() rep = state.__repr__() return f"<template {rep[1:]}" def _collect_state(hass: HomeAssistantType, entity_id: str) -> None: entity_collect = hass.data.get(_RENDER_INFO) if entity_collect is not None: # pylint: disable=protected-access entity_collect._entities.append(entity_id) def _wrap_state( hass: HomeAssistantType, state: Optional[State] ) -> Optional[TemplateState]: """Wrap a state.""" return None if state is None else TemplateState(hass, state) def _get_state(hass: HomeAssistantType, entity_id: str) -> Optional[TemplateState]: state = hass.states.get(entity_id) if state is None: # Only need to collect if none, if not none collect first actual # access to the state properties in the state wrapper. 
_collect_state(hass, entity_id) return None return _wrap_state(hass, state) def _resolve_state( hass: HomeAssistantType, entity_id_or_state: Any ) -> Union[State, TemplateState, None]: """Return state or entity_id if given.""" if isinstance(entity_id_or_state, State): return entity_id_or_state if isinstance(entity_id_or_state, str): return _get_state(hass, entity_id_or_state) return None def expand(hass: HomeAssistantType, *args: Any) -> Iterable[State]: """Expand out any groups into entity states.""" search = list(args) found = {} while search: entity = search.pop() if isinstance(entity, str): entity_id = entity entity = _get_state(hass, entity) if entity is None: continue elif isinstance(entity, State): entity_id = entity.entity_id elif isinstance(entity, collections.abc.Iterable): search += entity continue else: # ignore other types continue # pylint: disable=import-outside-toplevel from homeassistant.components import group if split_entity_id(entity_id)[0] == group.DOMAIN: # Collect state will be called in here since it's wrapped group_entities = entity.attributes.get(ATTR_ENTITY_ID) if group_entities: search += group_entities else: found[entity_id] = entity return sorted(found.values(), key=lambda a: a.entity_id) def closest(hass, *args): """Find closest entity. Closest to home: closest(states) closest(states.device_tracker) closest('group.children') closest(states.group.children) Closest to a point: closest(23.456, 23.456, 'group.children') closest('zone.school', 'group.children') closest(states.zone.school, 'group.children') As a filter: states | closest states.device_tracker | closest ['group.children', states.device_tracker] | closest 'group.children' | closest(23.456, 23.456) states.device_tracker | closest('zone.school') 'group.children' | closest(states.zone.school) """ if len(args) == 1: latitude = hass.config.latitude longitude = hass.config.longitude entities = args[0] elif len(args) == 2: point_state = _resolve_state(hass, args[0]) if point_state is None: _LOGGER.warning("Closest:Unable to find state %s", args[0]) return None if not loc_helper.has_location(point_state): _LOGGER.warning( "Closest:State does not contain valid location: %s", point_state ) return None latitude = point_state.attributes.get(ATTR_LATITUDE) longitude = point_state.attributes.get(ATTR_LONGITUDE) entities = args[1] else: latitude = convert(args[0], float) longitude = convert(args[1], float) if latitude is None or longitude is None: _LOGGER.warning( "Closest:Received invalid coordinates: %s, %s", args[0], args[1] ) return None entities = args[2] states = expand(hass, entities) # state will already be wrapped here return loc_helper.closest(latitude, longitude, states) def closest_filter(hass, *args): """Call closest as a filter. Need to reorder arguments.""" new_args = list(args[1:]) new_args.append(args[0]) return closest(hass, *new_args) def distance(hass, *args): """Calculate distance. Will calculate distance from home to a point or between points. Points can be passed in using state objects or lat/lng coordinates. 
""" locations = [] to_process = list(args) while to_process: value = to_process.pop(0) point_state = _resolve_state(hass, value) if point_state is None: # We expect this and next value to be lat&lng if not to_process: _LOGGER.warning( "Distance:Expected latitude and longitude, got %s", value ) return None value_2 = to_process.pop(0) latitude = convert(value, float) longitude = convert(value_2, float) if latitude is None or longitude is None: _LOGGER.warning( "Distance:Unable to process latitude and longitude: %s, %s", value, value_2, ) return None else: if not loc_helper.has_location(point_state): _LOGGER.warning( "distance:State does not contain valid location: %s", point_state ) return None latitude = point_state.attributes.get(ATTR_LATITUDE) longitude = point_state.attributes.get(ATTR_LONGITUDE) locations.append((latitude, longitude)) if len(locations) == 1: return hass.config.distance(*locations[0]) return hass.config.units.length( loc_util.distance(*locations[0] + locations[1]), LENGTH_METERS ) def is_state(hass: HomeAssistantType, entity_id: str, state: State) -> bool: """Test if a state is a specific value.""" state_obj = _get_state(hass, entity_id) return state_obj is not None and state_obj.state == state def is_state_attr(hass, entity_id, name, value): """Test if a state's attribute is a specific value.""" attr = state_attr(hass, entity_id, name) return attr is not None and attr == value def state_attr(hass, entity_id, name): """Get a specific attribute from a state.""" state_obj = _get_state(hass, entity_id) if state_obj is not None: return state_obj.attributes.get(name) return None def forgiving_round(value, precision=0, method="common"): """Round accepted strings.""" try: # support rounding methods like jinja multiplier = float(10 ** precision) if method == "ceil": value = math.ceil(float(value) * multiplier) / multiplier elif method == "floor": value = math.floor(float(value) * multiplier) / multiplier elif method == "half": value = round(float(value) * 2) / 2 else: # if method is common or something else, use common rounding value = round(float(value), precision) return int(value) if precision == 0 else value except (ValueError, TypeError): # If value can't be converted to float return value def multiply(value, amount): """Filter to convert value to float and multiply it.""" try: return float(value) * amount except (ValueError, TypeError): # If value can't be converted to float return value def logarithm(value, base=math.e): """Filter to get logarithm of the value with a specific base.""" try: return math.log(float(value), float(base)) except (ValueError, TypeError): return value def sine(value): """Filter to get sine of the value.""" try: return math.sin(float(value)) except (ValueError, TypeError): return value def cosine(value): """Filter to get cosine of the value.""" try: return math.cos(float(value)) except (ValueError, TypeError): return value def tangent(value): """Filter to get tangent of the value.""" try: return math.tan(float(value)) except (ValueError, TypeError): return value def arc_sine(value): """Filter to get arc sine of the value.""" try: return math.asin(float(value)) except (ValueError, TypeError): return value def arc_cosine(value): """Filter to get arc cosine of the value.""" try: return math.acos(float(value)) except (ValueError, TypeError): return value def arc_tangent(value): """Filter to get arc tangent of the value.""" try: return math.atan(float(value)) except (ValueError, TypeError): return value def arc_tangent2(*args): """Filter to calculate 
four quadrant arc tangent of y / x.""" try: if len(args) == 1 and isinstance(args[0], (list, tuple)): args = args[0] return math.atan2(float(args[0]), float(args[1])) except (ValueError, TypeError): return args def square_root(value): """Filter to get square root of the value.""" try: return math.sqrt(float(value)) except (ValueError, TypeError): return value def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True): """Filter to convert given timestamp to format.""" try: date = dt_util.utc_from_timestamp(value) if local: date = dt_util.as_local(date) return date.strftime(date_format) except (ValueError, TypeError): # If timestamp can't be converted return value def timestamp_local(value): """Filter to convert given timestamp to local date/time.""" try: return dt_util.as_local(dt_util.utc_from_timestamp(value)).strftime( DATE_STR_FORMAT ) except (ValueError, TypeError): # If timestamp can't be converted return value def timestamp_utc(value): """Filter to convert given timestamp to UTC date/time.""" try: return dt_util.utc_from_timestamp(value).strftime(DATE_STR_FORMAT) except (ValueError, TypeError): # If timestamp can't be converted return value def forgiving_as_timestamp(value): """Try to convert value to timestamp.""" try: return dt_util.as_timestamp(value) except (ValueError, TypeError): return None def strptime(string, fmt): """Parse a time string to datetime.""" try: return datetime.strptime(string, fmt) except (ValueError, AttributeError): return string def fail_when_undefined(value): """Filter to force a failure when the value is undefined.""" if isinstance(value, jinja2.Undefined): value() return value def forgiving_float(value): """Try to convert value to a float.""" try: return float(value) except (ValueError, TypeError): return value def regex_match(value, find="", ignorecase=False): """Match value using regex.""" if not isinstance(value, str): value = str(value) flags = re.I if ignorecase else 0 return bool(re.match(find, value, flags)) def regex_replace(value="", find="", replace="", ignorecase=False): """Replace using regex.""" if not isinstance(value, str): value = str(value) flags = re.I if ignorecase else 0 regex = re.compile(find, flags) return regex.sub(replace, value) def regex_search(value, find="", ignorecase=False): """Search using regex.""" if not isinstance(value, str): value = str(value) flags = re.I if ignorecase else 0 return bool(re.search(find, value, flags)) def regex_findall_index(value, find="", index=0, ignorecase=False): """Find all matches using regex and then pick specific match index.""" if not isinstance(value, str): value = str(value) flags = re.I if ignorecase else 0 return re.findall(find, value, flags)[index] def bitwise_and(first_value, second_value): """Perform a bitwise and operation.""" return first_value & second_value def bitwise_or(first_value, second_value): """Perform a bitwise or operation.""" return first_value | second_value def base64_encode(value): """Perform base64 encode.""" return base64.b64encode(value.encode("utf-8")).decode("utf-8") def base64_decode(value): """Perform base64 denode.""" return base64.b64decode(value).decode("utf-8") def ordinal(value): """Perform ordinal conversion.""" return str(value) + ( list(["th", "st", "nd", "rd"] + ["th"] * 6)[(int(str(value)[-1])) % 10] if int(str(value)[-2:]) % 100 not in range(11, 14) else "th" ) def from_json(value): """Convert a JSON string to an object.""" return json.loads(value) def to_json(value): """Convert an object to a JSON string.""" return json.dumps(value) 
@contextfilter def random_every_time(context, values): """Choose a random value. Unlike Jinja's random filter, this is context-dependent to avoid caching the chosen value. """ return random.choice(values) def relative_time(value): """ Take a datetime and return its "age" as a string. The age can be in second, minute, hour, day, month or year. Only the biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will be returned. Make sure date is not in the future, or else it will return None. If the input are not a datetime object the input will be returned unmodified. """ if not isinstance(value, datetime): return value if not value.tzinfo: value = dt_util.as_local(value) if dt_util.now() < value: return value return dt_util.get_age(value) def urlencode(value): """Urlencode dictionary and return as UTF-8 string.""" return urllib_urlencode(value).encode("utf-8") class TemplateEnvironment(ImmutableSandboxedEnvironment): """The Home Assistant template environment.""" def __init__(self, hass): """Initialise template environment.""" super().__init__() self.hass = hass self.filters["round"] = forgiving_round self.filters["multiply"] = multiply self.filters["log"] = logarithm self.filters["sin"] = sine self.filters["cos"] = cosine self.filters["tan"] = tangent self.filters["asin"] = arc_sine self.filters["acos"] = arc_cosine self.filters["atan"] = arc_tangent self.filters["atan2"] = arc_tangent2 self.filters["sqrt"] = square_root self.filters["as_timestamp"] = forgiving_as_timestamp self.filters["timestamp_custom"] = timestamp_custom self.filters["timestamp_local"] = timestamp_local self.filters["timestamp_utc"] = timestamp_utc self.filters["to_json"] = to_json self.filters["from_json"] = from_json self.filters["is_defined"] = fail_when_undefined self.filters["max"] = max self.filters["min"] = min self.filters["random"] = random_every_time self.filters["base64_encode"] = base64_encode self.filters["base64_decode"] = base64_decode self.filters["ordinal"] = ordinal self.filters["regex_match"] = regex_match self.filters["regex_replace"] = regex_replace self.filters["regex_search"] = regex_search self.filters["regex_findall_index"] = regex_findall_index self.filters["bitwise_and"] = bitwise_and self.filters["bitwise_or"] = bitwise_or self.filters["ord"] = ord self.globals["log"] = logarithm self.globals["sin"] = sine self.globals["cos"] = cosine self.globals["tan"] = tangent self.globals["sqrt"] = square_root self.globals["pi"] = math.pi self.globals["tau"] = math.pi * 2 self.globals["e"] = math.e self.globals["asin"] = arc_sine self.globals["acos"] = arc_cosine self.globals["atan"] = arc_tangent self.globals["atan2"] = arc_tangent2 self.globals["float"] = forgiving_float self.globals["now"] = dt_util.now self.globals["utcnow"] = dt_util.utcnow self.globals["as_timestamp"] = forgiving_as_timestamp self.globals["relative_time"] = relative_time self.globals["strptime"] = strptime self.globals["urlencode"] = urlencode if hass is None: return # We mark these as a context functions to ensure they get # evaluated fresh with every execution, rather than executed # at compile time and the value stored. The context itself # can be discarded, we only need to get at the hass object. 
def hassfunction(func): """Wrap function that depend on hass.""" @wraps(func) def wrapper(*args, **kwargs): return func(hass, *args[1:], **kwargs) return contextfunction(wrapper) self.globals["expand"] = hassfunction(expand) self.filters["expand"] = contextfilter(self.globals["expand"]) self.globals["closest"] = hassfunction(closest) self.filters["closest"] = contextfilter(hassfunction(closest_filter)) self.globals["distance"] = hassfunction(distance) self.globals["is_state"] = hassfunction(is_state) self.globals["is_state_attr"] = hassfunction(is_state_attr) self.globals["state_attr"] = hassfunction(state_attr) self.globals["states"] = AllStates(hass) def is_safe_callable(self, obj): """Test if callback is safe.""" return isinstance(obj, AllStates) or super().is_safe_callable(obj) def is_safe_attribute(self, obj, attr, value): """Test if attribute is safe.""" return isinstance(obj, Namespace) or super().is_safe_attribute(obj, attr, value) _NO_HASS_ENV = TemplateEnvironment(None)
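TemplateEnvironment above layers Home Assistant's filters and globals onto Jinja2's ImmutableSandboxedEnvironment, registering plain Python callables under names like "round", "log" and "ordinal", and only wiring in hass-dependent helpers (states, is_state, expand, ...) when a hass instance is supplied. A minimal standalone sketch of that registration pattern, using only jinja2 and an illustrative "forgiving" filter in the same style (the names here are placeholders, not Home Assistant APIs):

import math

from jinja2.sandbox import ImmutableSandboxedEnvironment


def forgiving_log(value, base=math.e):
    """Return the logarithm of value, or the input unchanged if not numeric."""
    try:
        return math.log(float(value), float(base))
    except (ValueError, TypeError):
        return value


env = ImmutableSandboxedEnvironment()
env.filters["log"] = forgiving_log   # usable as a filter: {{ x | log(10) }}
env.globals["log"] = forgiving_log   # and as a global function: {{ log(x, 10) }}
env.globals["pi"] = math.pi

print(env.from_string("{{ 100 | log(10) | round(1) }}").render())  # 2.0
print(env.from_string("{{ log('oops') }}").render())               # oops (passed through)
print(env.from_string("{{ (pi * 2) | round(4) }}").render())       # 6.2832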
"""Tests for the Device Registry.""" import asyncio import pytest from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CoreState, callback from homeassistant.helpers import device_registry, entity_registry from tests.async_mock import patch from tests.common import MockConfigEntry, flush_store, mock_device_registry @pytest.fixture def registry(hass): """Return an empty, loaded, registry.""" return mock_device_registry(hass) @pytest.fixture def update_events(hass): """Capture update events.""" events = [] @callback def async_capture(event): events.append(event.data) hass.bus.async_listen(device_registry.EVENT_DEVICE_REGISTRY_UPDATED, async_capture) return events async def test_get_or_create_returns_same_entry(hass, registry, update_events): """Make sure we do not duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, sw_version="sw-version", name="name", manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "11:22:33:66:77:88")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) assert len(registry.devices) == 1 assert entry.id == entry2.id assert entry.id == entry3.id assert entry.identifiers == {("bridgeid", "0123")} assert entry3.manufacturer == "manufacturer" assert entry3.model == "model" assert entry3.name == "name" assert entry3.sw_version == "sw-version" await hass.async_block_till_done() # Only 2 update events. The third entry did not generate any changes. 
assert len(update_events) == 2 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry.id async def test_requirement_for_identifier_or_connection(registry): """Make sure we do require some descriptor of device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers=set(), manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="1234", connections=set(), identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="1234", connections=set(), identifiers=set(), manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry assert entry2 assert entry3 is None async def test_multiple_config_entries(registry): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 1 assert entry.id == entry2.id assert entry.id == entry3.id assert entry2.config_entries == {"123", "456"} async def test_loading_from_storage(hass, hass_storage): """Test loading stored devices on start.""" hass_storage[device_registry.STORAGE_KEY] = { "version": device_registry.STORAGE_VERSION, "data": { "devices": [ { "config_entries": ["1234"], "connections": [["Zigbee", "01.23.45.67.89"]], "id": "abcdefghijklm", "identifiers": [["serial", "12:34:56:AB:CD:EF"]], "manufacturer": "manufacturer", "model": "model", "name": "name", "sw_version": "version", "entry_type": "service", "area_id": "12345A", "name_by_user": "Test Friendly Name", } ], "deleted_devices": [ { "config_entries": ["1234"], "connections": [["Zigbee", "23.45.67.89.01"]], "id": "bcdefghijklmn", "identifiers": [["serial", "34:56:AB:CD:EF:12"]], } ], }, } registry = await device_registry.async_get_registry(hass) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 1 entry = registry.async_get_or_create( config_entry_id="1234", connections={("Zigbee", "01.23.45.67.89")}, identifiers={("serial", "12:34:56:AB:CD:EF")}, manufacturer="manufacturer", model="model", ) assert entry.id == "abcdefghijklm" assert entry.area_id == "12345A" assert entry.name_by_user == "Test Friendly Name" assert entry.entry_type == "service" assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) assert isinstance(entry.identifiers, set) entry = registry.async_get_or_create( config_entry_id="1234", connections={("Zigbee", "23.45.67.89.01")}, identifiers={("serial", "34:56:AB:CD:EF:12")}, manufacturer="manufacturer", model="model", ) assert entry.id == "bcdefghijklmn" assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) assert isinstance(entry.identifiers, set) async 
def test_removing_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} registry.async_clear_config_entry("123") entry = registry.async_get_device({("bridgeid", "0123")}, set()) entry3_removed = registry.async_get_device({("bridgeid", "4567")}, set()) assert entry.config_entries == {"456"} assert entry3_removed is None await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "update" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id async def test_deleted_device_removing_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} registry.async_remove_device(entry.id) registry.async_remove_device(entry3.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 2 await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "remove" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id registry.async_clear_config_entry("123") assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 registry.async_clear_config_entry("456") assert 
len(registry.devices) == 0 assert len(registry.deleted_devices) == 0 # No event when a deleted device is purged await hass.async_block_till_done() assert len(update_events) == 5 # Re-add, expect new device id entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id != entry2.id async def test_removing_area_id(registry): """Make sure we can clear area id.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry_w_area = registry.async_update_device(entry.id, area_id="12345A") registry.async_clear_area_id("12345A") entry_wo_area = registry.async_get_device({("bridgeid", "0123")}, set()) assert not entry_wo_area.area_id assert entry_w_area != entry_wo_area async def test_deleted_device_removing_area_id(registry): """Make sure we can clear area id of deleted device.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry_w_area = registry.async_update_device(entry.id, area_id="12345A") registry.async_remove_device(entry.id) registry.async_clear_area_id("12345A") entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id == entry2.id entry_wo_area = registry.async_get_device({("bridgeid", "0123")}, set()) assert not entry_wo_area.area_id assert entry_w_area != entry_wo_area async def test_specifying_via_device_create(registry): """Test specifying a via_device and updating.""" via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", ) light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id == via.id async def test_specifying_via_device_update(registry): """Test specifying a via_device and updating.""" light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id is None via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", ) light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) assert light.via_device_id == via.id async def test_loading_saving_data(hass, registry): """Test that we load/save data correctly.""" orig_via = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "0123")}, manufacturer="manufacturer", model="via", name="Original Name", sw_version="Orig SW 1", 
entry_type="device", ) orig_light = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) orig_light2 = registry.async_get_or_create( config_entry_id="456", connections=set(), identifiers={("hue", "789")}, manufacturer="manufacturer", model="light", via_device=("hue", "0123"), ) registry.async_remove_device(orig_light2.id) assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 1 orig_via = registry.async_update_device( orig_via.id, area_id="mock-area-id", name_by_user="mock-name-by-user" ) # Now load written data in new registry registry2 = device_registry.DeviceRegistry(hass) await flush_store(registry._store) await registry2.async_load() # Ensure same order assert list(registry.devices) == list(registry2.devices) assert list(registry.deleted_devices) == list(registry2.deleted_devices) new_via = registry2.async_get_device({("hue", "0123")}, set()) new_light = registry2.async_get_device({("hue", "456")}, set()) assert orig_via == new_via assert orig_light == new_light async def test_no_unnecessary_changes(registry): """Make sure we do not consider devices changes.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, identifiers={("hue", "456"), ("bla", "123")}, ) with patch( "homeassistant.helpers.device_registry.DeviceRegistry.async_schedule_save" ) as mock_save: entry2 = registry.async_get_or_create( config_entry_id="1234", identifiers={("hue", "456")} ) assert entry.id == entry2.id assert len(mock_save.mock_calls) == 0 async def test_format_mac(registry): """Make sure we normalize mac addresses.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) for mac in ["123456ABCDEF", "123456abcdef", "12:34:56:ab:cd:ef", "1234.56ab.cdef"]: test_entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, mac)}, ) assert test_entry.id == entry.id, mac assert test_entry.connections == { (device_registry.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef") } # This should not raise for invalid in [ "invalid_mac", "123456ABCDEFG", # 1 extra char "12:34:56:ab:cdef", # not enough : "12:34:56:ab:cd:e:f", # too many : "1234.56abcdef", # not enough . "123.456.abc.def", # too many . 
]: invalid_mac_entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, invalid)}, ) assert list(invalid_mac_entry.connections)[0][1] == invalid async def test_update(registry): """Verify that we can update some attributes of a device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("hue", "456"), ("bla", "123")}, ) new_identifiers = {("hue", "654"), ("bla", "321")} assert not entry.area_id assert not entry.name_by_user with patch.object(registry, "async_schedule_save") as mock_save: updated_entry = registry.async_update_device( entry.id, area_id="12345A", manufacturer="Test Producer", model="Test Model", name_by_user="Test Friendly Name", new_identifiers=new_identifiers, via_device_id="98765B", ) assert mock_save.call_count == 1 assert updated_entry != entry assert updated_entry.area_id == "12345A" assert updated_entry.manufacturer == "Test Producer" assert updated_entry.model == "Test Model" assert updated_entry.name_by_user == "Test Friendly Name" assert updated_entry.identifiers == new_identifiers assert updated_entry.via_device_id == "98765B" assert registry.async_get_device({("hue", "456")}, {}) is None assert registry.async_get_device({("bla", "123")}, {}) is None assert registry.async_get_device({("hue", "654")}, {}) == updated_entry assert registry.async_get_device({("bla", "321")}, {}) == updated_entry assert ( registry.async_get_device( {}, {(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")} ) == updated_entry ) assert registry.async_get(updated_entry.id) is not None async def test_update_remove_config_entries(hass, registry, update_events): """Make sure we do not get duplicate entries.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry2 = registry.async_get_or_create( config_entry_id="456", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 2 assert entry.id == entry2.id assert entry.id != entry3.id assert entry2.config_entries == {"123", "456"} updated_entry = registry.async_update_device( entry2.id, remove_config_entry_id="123" ) removed_entry = registry.async_update_device( entry3.id, remove_config_entry_id="123" ) assert updated_entry.config_entries == {"456"} assert removed_entry is None removed_entry = registry.async_get_device({("bridgeid", "4567")}, set()) assert removed_entry is None await hass.async_block_till_done() assert len(update_events) == 5 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "update" assert update_events[1]["device_id"] == entry2.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry3.id assert update_events[3]["action"] == "update" assert update_events[3]["device_id"] == entry.id assert update_events[4]["action"] == "remove" assert update_events[4]["device_id"] == entry3.id async def test_loading_race_condition(hass): 
"""Test only one storage load called when concurrent loading occurred .""" with patch( "homeassistant.helpers.device_registry.DeviceRegistry.async_load" ) as mock_load: results = await asyncio.gather( device_registry.async_get_registry(hass), device_registry.async_get_registry(hass), ) mock_load.assert_called_once_with() assert results[0] == results[1] async def test_update_sw_version(registry): """Verify that we can update software version of a device.""" entry = registry.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bla", "123")}, ) assert not entry.sw_version sw_version = "0x20020263" with patch.object(registry, "async_schedule_save") as mock_save: updated_entry = registry.async_update_device(entry.id, sw_version=sw_version) assert mock_save.call_count == 1 assert updated_entry != entry assert updated_entry.sw_version == sw_version async def test_cleanup_device_registry(hass, registry): """Test cleanup works.""" config_entry = MockConfigEntry(domain="hue") config_entry.add_to_hass(hass) d1 = registry.async_get_or_create( identifiers={("hue", "d1")}, config_entry_id=config_entry.entry_id ) registry.async_get_or_create( identifiers={("hue", "d2")}, config_entry_id=config_entry.entry_id ) d3 = registry.async_get_or_create( identifiers={("hue", "d3")}, config_entry_id=config_entry.entry_id ) registry.async_get_or_create( identifiers={("something", "d4")}, config_entry_id="non_existing" ) ent_reg = await entity_registry.async_get_registry(hass) ent_reg.async_get_or_create("light", "hue", "e1", device_id=d1.id) ent_reg.async_get_or_create("light", "hue", "e2", device_id=d1.id) ent_reg.async_get_or_create("light", "hue", "e3", device_id=d3.id) device_registry.async_cleanup(hass, registry, ent_reg) assert registry.async_get_device({("hue", "d1")}, set()) is not None assert registry.async_get_device({("hue", "d2")}, set()) is not None assert registry.async_get_device({("hue", "d3")}, set()) is not None assert registry.async_get_device({("something", "d4")}, set()) is None async def test_cleanup_startup(hass): """Test we run a cleanup on startup.""" hass.state = CoreState.not_running await device_registry.async_get_registry(hass) with patch( "homeassistant.helpers.device_registry.Debouncer.async_call" ) as mock_call: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert len(mock_call.mock_calls) == 1 async def test_cleanup_entity_registry_change(hass): """Test we run a cleanup when entity registry changes.""" await device_registry.async_get_registry(hass) ent_reg = await entity_registry.async_get_registry(hass) with patch( "homeassistant.helpers.device_registry.Debouncer.async_call" ) as mock_call: entity = ent_reg.async_get_or_create("light", "hue", "e1") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 0 # Normal update does not trigger ent_reg.async_update_entity(entity.entity_id, name="updated") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 0 # Device ID update triggers ent_reg.async_get_or_create("light", "hue", "e1", device_id="bla") await hass.async_block_till_done() assert len(mock_call.mock_calls) == 1 # Removal also triggers ent_reg.async_remove(entity.entity_id) await hass.async_block_till_done() assert len(mock_call.mock_calls) == 2 async def test_restore_device(hass, registry, update_events): """Make sure device id is stable.""" entry = registry.async_get_or_create( config_entry_id="123", 
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 0 registry.async_remove_device(entry.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, manufacturer="manufacturer", model="model", ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, manufacturer="manufacturer", model="model", ) assert entry.id == entry3.id assert entry.id != entry2.id assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 assert isinstance(entry3.config_entries, set) assert isinstance(entry3.connections, set) assert isinstance(entry3.identifiers, set) await hass.async_block_till_done() assert len(update_events) == 4 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "remove" assert update_events[1]["device_id"] == entry.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry2.id assert update_events[3]["action"] == "create" assert update_events[3]["device_id"] == entry3.id async def test_restore_simple_device(hass, registry, update_events): """Make sure device id is stable.""" entry = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, ) assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 0 registry.async_remove_device(entry.id) assert len(registry.devices) == 0 assert len(registry.deleted_devices) == 1 entry2 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")}, identifiers={("bridgeid", "4567")}, ) entry3 = registry.async_get_or_create( config_entry_id="123", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, identifiers={("bridgeid", "0123")}, ) assert entry.id == entry3.id assert entry.id != entry2.id assert len(registry.devices) == 2 assert len(registry.deleted_devices) == 0 await hass.async_block_till_done() assert len(update_events) == 4 assert update_events[0]["action"] == "create" assert update_events[0]["device_id"] == entry.id assert update_events[1]["action"] == "remove" assert update_events[1]["device_id"] == entry.id assert update_events[2]["action"] == "create" assert update_events[2]["device_id"] == entry2.id assert update_events[3]["action"] == "create" assert update_events[3]["device_id"] == entry3.id
mKeRix/home-assistant
tests/helpers/test_device_registry.py
homeassistant/helpers/template.py
"""Support for monitoring juicenet/juicepoint/juicebox based EVSE switches.""" from homeassistant.components.switch import SwitchEntity from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR from .entity import JuiceNetDevice async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the JuiceNet switches.""" entities = [] juicenet_data = hass.data[DOMAIN][config_entry.entry_id] api = juicenet_data[JUICENET_API] coordinator = juicenet_data[JUICENET_COORDINATOR] for device in api.devices: entities.append(JuiceNetChargeNowSwitch(device, coordinator)) async_add_entities(entities) class JuiceNetChargeNowSwitch(JuiceNetDevice, SwitchEntity): """Implementation of a JuiceNet switch.""" def __init__(self, device, coordinator): """Initialise the switch.""" super().__init__(device, "charge_now", coordinator) @property def name(self): """Return the name of the device.""" return f"{self.device.name} Charge Now" @property def is_on(self): """Return true if switch is on.""" return self.device.override_time != 0 async def async_turn_on(self, **kwargs): """Charge now.""" await self.device.set_override(True) async def async_turn_off(self, **kwargs): """Don't charge now.""" await self.device.set_override(False)
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/juicenet/switch.py
"""Support for Melissa Climate A/C.""" import logging from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( FAN_AUTO, FAN_HIGH, FAN_LOW, FAN_MEDIUM, HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_OFF, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS from . import DATA_MELISSA _LOGGER = logging.getLogger(__name__) SUPPORT_FLAGS = SUPPORT_FAN_MODE | SUPPORT_TARGET_TEMPERATURE OP_MODES = [ HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_OFF, ] FAN_MODES = [FAN_AUTO, FAN_HIGH, FAN_MEDIUM, FAN_LOW] async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Iterate through and add all Melissa devices.""" api = hass.data[DATA_MELISSA] devices = (await api.async_fetch_devices()).values() all_devices = [] for device in devices: if device["type"] == "melissa": all_devices.append(MelissaClimate(api, device["serial_number"], device)) async_add_entities(all_devices) class MelissaClimate(ClimateEntity): """Representation of a Melissa Climate device.""" def __init__(self, api, serial_number, init_data): """Initialize the climate device.""" self._name = init_data["name"] self._api = api self._serial_number = serial_number self._data = init_data["controller_log"] self._state = None self._cur_settings = None @property def name(self): """Return the name of the thermostat, if any.""" return self._name @property def fan_mode(self): """Return the current fan mode.""" if self._cur_settings is not None: return self.melissa_fan_to_hass(self._cur_settings[self._api.FAN]) @property def current_temperature(self): """Return the current temperature.""" if self._data: return self._data[self._api.TEMP] @property def current_humidity(self): """Return the current humidity value.""" if self._data: return self._data[self._api.HUMIDITY] @property def target_temperature_step(self): """Return the supported step of target temperature.""" return PRECISION_WHOLE @property def hvac_mode(self): """Return the current operation mode.""" if self._cur_settings is None: return None is_on = self._cur_settings[self._api.STATE] in ( self._api.STATE_ON, self._api.STATE_IDLE, ) if not is_on: return HVAC_MODE_OFF return self.melissa_op_to_hass(self._cur_settings[self._api.MODE]) @property def hvac_modes(self): """Return the list of available operation modes.""" return OP_MODES @property def fan_modes(self): """List of available fan modes.""" return FAN_MODES @property def target_temperature(self): """Return the temperature we try to reach.""" if self._cur_settings is None: return None return self._cur_settings[self._api.TEMP] @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def min_temp(self): """Return the minimum supported temperature for the thermostat.""" return 16 @property def max_temp(self): """Return the maximum supported temperature for the thermostat.""" return 30 @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temp = kwargs.get(ATTR_TEMPERATURE) await self.async_send({self._api.TEMP: temp}) async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" melissa_fan_mode = self.hass_fan_to_melissa(fan_mode) await self.async_send({self._api.FAN: melissa_fan_mode}) async def 
async_set_hvac_mode(self, hvac_mode): """Set operation mode.""" if hvac_mode == HVAC_MODE_OFF: await self.async_send({self._api.STATE: self._api.STATE_OFF}) return mode = self.hass_mode_to_melissa(hvac_mode) await self.async_send( {self._api.MODE: mode, self._api.STATE: self._api.STATE_ON} ) async def async_send(self, value): """Send action to service.""" try: old_value = self._cur_settings.copy() self._cur_settings.update(value) except AttributeError: old_value = None if not await self._api.async_send( self._serial_number, "melissa", self._cur_settings ): self._cur_settings = old_value async def async_update(self): """Get latest data from Melissa.""" try: self._data = (await self._api.async_status(cached=True))[ self._serial_number ] self._cur_settings = ( await self._api.async_cur_settings(self._serial_number) )["controller"]["_relation"]["command_log"] except KeyError: _LOGGER.warning("Unable to update entity %s", self.entity_id) def melissa_op_to_hass(self, mode): """Translate Melissa modes to hass states.""" if mode == self._api.MODE_HEAT: return HVAC_MODE_HEAT if mode == self._api.MODE_COOL: return HVAC_MODE_COOL if mode == self._api.MODE_DRY: return HVAC_MODE_DRY if mode == self._api.MODE_FAN: return HVAC_MODE_FAN_ONLY _LOGGER.warning("Operation mode %s could not be mapped to hass", mode) return None def melissa_fan_to_hass(self, fan): """Translate Melissa fan modes to hass modes.""" if fan == self._api.FAN_AUTO: return HVAC_MODE_AUTO if fan == self._api.FAN_LOW: return FAN_LOW if fan == self._api.FAN_MEDIUM: return FAN_MEDIUM if fan == self._api.FAN_HIGH: return FAN_HIGH _LOGGER.warning("Fan mode %s could not be mapped to hass", fan) return None def hass_mode_to_melissa(self, mode): """Translate hass states to melissa modes.""" if mode == HVAC_MODE_HEAT: return self._api.MODE_HEAT if mode == HVAC_MODE_COOL: return self._api.MODE_COOL if mode == HVAC_MODE_DRY: return self._api.MODE_DRY if mode == HVAC_MODE_FAN_ONLY: return self._api.MODE_FAN _LOGGER.warning("Melissa has no setting for %s mode", mode) def hass_fan_to_melissa(self, fan): """Translate hass fan modes to melissa modes.""" if fan == HVAC_MODE_AUTO: return self._api.FAN_AUTO if fan == FAN_LOW: return self._api.FAN_LOW if fan == FAN_MEDIUM: return self._api.FAN_MEDIUM if fan == FAN_HIGH: return self._api.FAN_HIGH _LOGGER.warning("Melissa has no setting for %s fan mode", fan)
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/melissa/climate.py
"""Support for VELUX KLF 200 devices.""" import logging from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv DOMAIN = "velux" DATA_VELUX = "data_velux" SUPPORTED_DOMAINS = ["cover", "scene"] _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the velux component.""" try: hass.data[DATA_VELUX] = VeluxModule(hass, config[DOMAIN]) hass.data[DATA_VELUX].setup() await hass.data[DATA_VELUX].async_start() except PyVLXException as ex: _LOGGER.exception("Can't connect to velux interface: %s", ex) return False for component in SUPPORTED_DOMAINS: hass.async_create_task( discovery.async_load_platform(hass, component, DOMAIN, {}, config) ) return True class VeluxModule: """Abstraction for velux component.""" def __init__(self, hass, domain_config): """Initialize for velux component.""" self.pyvlx = None self._hass = hass self._domain_config = domain_config def setup(self): """Velux component setup.""" async def on_hass_stop(event): """Close connection when hass stops.""" _LOGGER.debug("Velux interface terminated") await self.pyvlx.disconnect() async def async_reboot_gateway(service_call): await self.pyvlx.reboot_gateway() self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) host = self._domain_config.get(CONF_HOST) password = self._domain_config.get(CONF_PASSWORD) self.pyvlx = PyVLX(host=host, password=password) self._hass.services.async_register( DOMAIN, "reboot_gateway", async_reboot_gateway ) async def async_start(self): """Start velux component.""" _LOGGER.debug("Velux interface started") await self.pyvlx.load_scenes() await self.pyvlx.load_nodes()
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
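The SHABBAT_PARAMS entries above are built by make_nyc_test_params and make_jerusalem_test_params, which are imported from the test package but not shown here. From the parametrize field list ("now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result") and the call sites, a plausible reconstruction looks like the sketch below; the coordinates come from TEST_PARAMS, while the 18-minute candle-lighting default and the havdalah default of 0 are assumptions, so the real helpers in the package __init__ may differ.

# Hypothetical reconstruction for illustration only -- the real helpers live in
# tests/components/jewish_calendar/__init__.py and may use different defaults.
NYC_LOCATION = ("America/New_York", 40.7128, -74.0060)      # from TEST_PARAMS above
JERUSALEM_LOCATION = ("Asia/Jerusalem", 31.778, 35.235)


def _make_test_params(now, result, tzname, latitude, longitude, diaspora, havdalah_offset=0):
    """Build one (now, candle_lighting, havdalah, diaspora, tz, lat, lon, result) tuple."""
    candle_lighting = 18  # assumed default: minutes before sunset
    return (now, candle_lighting, havdalah_offset, diaspora, tzname, latitude, longitude, result)


def make_nyc_test_params(now, result, havdalah_offset=0):
    """NYC is treated as diaspora in the expected results above."""
    return _make_test_params(now, result, *NYC_LOCATION, diaspora=True, havdalah_offset=havdalah_offset)


def make_jerusalem_test_params(now, result, havdalah_offset=0):
    """Jerusalem is treated as in-Israel (non-diaspora)."""
    return _make_test_params(now, result, *JERUSALEM_LOCATION, diaspora=False, havdalah_offset=havdalah_offset)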
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/velux/__init__.py
"""Insteon base entity.""" import functools import logging from pyinsteon import devices from homeassistant.core import callback from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from .const import ( DOMAIN, SIGNAL_ADD_DEFAULT_LINKS, SIGNAL_LOAD_ALDB, SIGNAL_PRINT_ALDB, SIGNAL_REMOVE_ENTITY, SIGNAL_SAVE_DEVICES, STATE_NAME_LABEL_MAP, ) from .utils import print_aldb_to_log _LOGGER = logging.getLogger(__name__) class InsteonEntity(Entity): """INSTEON abstract base entity.""" def __init__(self, device, group): """Initialize the INSTEON binary sensor.""" self._insteon_device_group = device.groups[group] self._insteon_device = device def __hash__(self): """Return the hash of the Insteon Entity.""" return hash(self._insteon_device) @property def should_poll(self): """No polling needed.""" return False @property def address(self): """Return the address of the node.""" return str(self._insteon_device.address) @property def group(self): """Return the INSTEON group that the entity responds to.""" return self._insteon_device_group.group @property def unique_id(self) -> str: """Return a unique ID.""" if self._insteon_device_group.group == 0x01: uid = self._insteon_device.id else: uid = f"{self._insteon_device.id}_{self._insteon_device_group.group}" return uid @property def name(self): """Return the name of the node (used for Entity_ID).""" # Set a base description description = self._insteon_device.description if description is None: description = "Unknown Device" # Get an extension label if there is one extension = self._get_label() if extension: extension = f" {extension}" return f"{description} {self._insteon_device.address}{extension}" @property def device_state_attributes(self): """Provide attributes for display on device card.""" return {"insteon_address": self.address, "insteon_group": self.group} @property def device_info(self): """Return device information.""" return { "identifiers": {(DOMAIN, str(self._insteon_device.address))}, "name": f"{self._insteon_device.description} {self._insteon_device.address}", "model": f"{self._insteon_device.model} ({self._insteon_device.cat!r}, 0x{self._insteon_device.subcat:02x})", "sw_version": f"{self._insteon_device.firmware:02x} Engine Version: {self._insteon_device.engine_version}", "manufacturer": "Smart Home", "via_device": (DOMAIN, str(devices.modem.address)), } @callback def async_entity_update(self, name, address, value, group): """Receive notification from transport that new data exists.""" _LOGGER.debug( "Received update for device %s group %d value %s", address, group, value, ) self.async_write_ha_state() async def async_added_to_hass(self): """Register INSTEON update events.""" _LOGGER.debug( "Tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.subscribe(self.async_entity_update) load_signal = f"{self.entity_id}_{SIGNAL_LOAD_ALDB}" self.async_on_remove( async_dispatcher_connect(self.hass, load_signal, self._async_read_aldb) ) print_signal = f"{self.entity_id}_{SIGNAL_PRINT_ALDB}" async_dispatcher_connect(self.hass, print_signal, self._print_aldb) default_links_signal = f"{self.entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}" async_dispatcher_connect( self.hass, default_links_signal, self._async_add_default_links ) remove_signal = f"{self._insteon_device.address.id}_{SIGNAL_REMOVE_ENTITY}" self.async_on_remove( async_dispatcher_connect( self.hass, remove_signal, 
functools.partial(self.async_remove, force_remove=True), ) ) async def async_will_remove_from_hass(self): """Unsubscribe to INSTEON update events.""" _LOGGER.debug( "Remove tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.unsubscribe(self.async_entity_update) async def _async_read_aldb(self, reload): """Call device load process and print to log.""" await self._insteon_device.aldb.async_load(refresh=reload) self._print_aldb() async_dispatcher_send(self.hass, SIGNAL_SAVE_DEVICES) def _print_aldb(self): """Print the device ALDB to the log file.""" print_aldb_to_log(self._insteon_device.aldb) def _get_label(self): """Get the device label for grouped devices.""" label = "" if len(self._insteon_device.groups) > 1: if self._insteon_device_group.name in STATE_NAME_LABEL_MAP: label = STATE_NAME_LABEL_MAP[self._insteon_device_group.name] else: label = f"Group {self.group:d}" return label async def _async_add_default_links(self): """Add default links between the device and the modem.""" await self._insteon_device.async_add_default_links()
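To make the naming scheme in InsteonEntity concrete, here is a small self-contained sketch of the unique_id and display-name derivation. The FakeDevice class and example values are illustrative stand-ins, not part of pyinsteon or the integration.

# Illustrative stand-in: only the attributes the naming logic reads are modelled.
class FakeDevice:
    def __init__(self, device_id, address, description):
        self.id = device_id
        self.address = address
        self.description = description


def insteon_unique_id(device, group):
    """Mirror InsteonEntity.unique_id: group 0x01 uses the bare device id."""
    if group == 0x01:
        return device.id
    return f"{device.id}_{group}"


def insteon_name(device, label=""):
    """Mirror InsteonEntity.name: description + address + optional group label."""
    description = device.description or "Unknown Device"
    extension = f" {label}" if label else ""
    return f"{description} {device.address}{extension}"


dimmer = FakeDevice("1a2b3c", "1A.2B.3C", "SwitchLinc Dimmer")
assert insteon_unique_id(dimmer, 1) == "1a2b3c"
assert insteon_unique_id(dimmer, 2) == "1a2b3c_2"
assert insteon_name(dimmer, "Group 2") == "SwitchLinc Dimmer 1A.2B.3C Group 2"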
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/insteon/insteon_entity.py
"""Config flow to configure the Toon component.""" import logging from typing import Any, Dict, List, Optional from toonapi import Agreement, Toon, ToonError import voluptuous as vol from homeassistant import config_entries from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler from .const import CONF_AGREEMENT, CONF_AGREEMENT_ID, CONF_MIGRATE, DOMAIN class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle a Toon config flow.""" CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH DOMAIN = DOMAIN VERSION = 2 agreements: Optional[List[Agreement]] = None data: Optional[Dict[str, Any]] = None @property def logger(self) -> logging.Logger: """Return logger.""" return logging.getLogger(__name__) async def async_oauth_create_entry(self, data: Dict[str, Any]) -> Dict[str, Any]: """Test connection and load up agreements.""" self.data = data toon = Toon( token=self.data["token"]["access_token"], session=async_get_clientsession(self.hass), ) try: self.agreements = await toon.agreements() except ToonError: return self.async_abort(reason="connection_error") if not self.agreements: return self.async_abort(reason="no_agreements") return await self.async_step_agreement() async def async_step_import( self, config: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: """Start a configuration flow based on imported data. This step is merely here to trigger "discovery" when the `toon` integration is listed in the user configuration, or when migrating from the version 1 schema. """ if config is not None and CONF_MIGRATE in config: self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) else: await self._async_handle_discovery_without_unique_id() return await self.async_step_user() async def async_step_agreement( self, user_input: Dict[str, Any] = None ) -> Dict[str, Any]: """Select Toon agreement to add.""" if len(self.agreements) == 1: return await self._create_entry(self.agreements[0]) agreements_list = [ f"{agreement.street} {agreement.house_number}, {agreement.city}" for agreement in self.agreements ] if user_input is None: return self.async_show_form( step_id="agreement", data_schema=vol.Schema( {vol.Required(CONF_AGREEMENT): vol.In(agreements_list)} ), ) agreement_index = agreements_list.index(user_input[CONF_AGREEMENT]) return await self._create_entry(self.agreements[agreement_index]) async def _create_entry(self, agreement: Agreement) -> Dict[str, Any]: if CONF_MIGRATE in self.context: await self.hass.config_entries.async_remove(self.context[CONF_MIGRATE]) await self.async_set_unique_id(agreement.agreement_id) self._abort_if_unique_id_configured() self.data[CONF_AGREEMENT_ID] = agreement.agreement_id return self.async_create_entry( title=f"{agreement.street} {agreement.house_number}, {agreement.city}", data=self.data, )
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/toon/config_flow.py
"""Support for Bond fans.""" import logging import math from typing import Any, Callable, List, Optional, Tuple from bond_api import Action, BPUPSubscriptions, DeviceType, Direction from homeassistant.components.fan import ( DIRECTION_FORWARD, DIRECTION_REVERSE, SUPPORT_DIRECTION, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, ) from .const import BPUP_SUBS, DOMAIN, HUB from .entity import BondEntity from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up Bond fan devices.""" data = hass.data[DOMAIN][entry.entry_id] hub: BondHub = data[HUB] bpup_subs: BPUPSubscriptions = data[BPUP_SUBS] fans = [ BondFan(hub, device, bpup_subs) for device in hub.devices if DeviceType.is_fan(device.type) ] async_add_entities(fans, True) class BondFan(BondEntity, FanEntity): """Representation of a Bond fan.""" def __init__(self, hub: BondHub, device: BondDevice, bpup_subs: BPUPSubscriptions): """Create HA entity representing Bond fan.""" super().__init__(hub, device, bpup_subs) self._power: Optional[bool] = None self._speed: Optional[int] = None self._direction: Optional[int] = None def _apply_state(self, state: dict): self._power = state.get("power") self._speed = state.get("speed") self._direction = state.get("direction") @property def supported_features(self) -> int: """Flag supported features.""" features = 0 if self._device.supports_speed(): features |= SUPPORT_SET_SPEED if self._device.supports_direction(): features |= SUPPORT_DIRECTION return features @property def _speed_range(self) -> Tuple[int, int]: """Return the range of speeds.""" return (1, self._device.props.get("max_speed", 3)) @property def percentage(self) -> Optional[str]: """Return the current speed percentage for the fan.""" if not self._speed or not self._power: return 0 return ranged_value_to_percentage(self._speed_range, self._speed) @property def current_direction(self) -> Optional[str]: """Return fan rotation direction.""" direction = None if self._direction == Direction.FORWARD: direction = DIRECTION_FORWARD elif self._direction == Direction.REVERSE: direction = DIRECTION_REVERSE return direction async def async_set_percentage(self, percentage: int) -> None: """Set the desired speed for the fan.""" _LOGGER.debug("async_set_percentage called with percentage %s", percentage) if percentage == 0: await self.async_turn_off() return bond_speed = math.ceil( percentage_to_ranged_value(self._speed_range, percentage) ) _LOGGER.debug( "async_set_percentage converted percentage %s to bond speed %s", percentage, bond_speed, ) await self._hub.bond.action( self._device.device_id, Action.set_speed(bond_speed) ) async def async_turn_on( self, speed: Optional[str] = None, percentage: Optional[int] = None, preset_mode: Optional[str] = None, **kwargs, ) -> None: """Turn on the fan.""" _LOGGER.debug("Fan async_turn_on called with percentage %s", percentage) if percentage is not None: await self.async_set_percentage(percentage) else: await self._hub.bond.action(self._device.device_id, Action.turn_on()) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the fan off.""" await self._hub.bond.action(self._device.device_id, Action.turn_off()) async def 
async_set_direction(self, direction: str): """Set fan rotation direction.""" bond_direction = ( Direction.REVERSE if direction == DIRECTION_REVERSE else Direction.FORWARD ) await self._hub.bond.action( self._device.device_id, Action.set_direction(bond_direction) )
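The speed handling in BondFan converts between Home Assistant's 0-100 percentage and Bond's integer speed range (1..max_speed, defaulting to 3). The sketch below approximates that mapping in isolation; the exact rounding lives in homeassistant.util.percentage and may differ slightly, so treat this as an illustration of the behaviour rather than the util module's implementation.

import math


def percentage_to_bond_speed(percentage, max_speed=3):
    """0 -> off (None); otherwise round up into 1..max_speed, as async_set_percentage does."""
    if percentage == 0:
        return None
    return min(max_speed, math.ceil(percentage / 100 * max_speed))


def bond_speed_to_percentage(speed, power, max_speed=3):
    """Mirror the percentage property: report 0 when the fan is off or speed is unknown."""
    if not speed or not power:
        return 0
    return round(speed / max_speed * 100)


assert percentage_to_bond_speed(0) is None  # maps to async_turn_off
assert percentage_to_bond_speed(33) == 1
assert percentage_to_bond_speed(100) == 3
assert bond_speed_to_percentage(2, power=True) == 67
assert bond_speed_to_percentage(3, power=False) == 0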
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
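The SHABBAT_PARAMS cases above are built with make_nyc_test_params and make_jerusalem_test_params, imported from the test package's __init__ and not shown in this excerpt. Judging from the parametrize argument order, a hypothetical sketch of the NYC helper could look roughly like this (the latitude, longitude, and candle-lighting defaults are assumptions):

# Hypothetical sketch of one helper used to build SHABBAT_PARAMS; the real
# version lives in tests/components/jewish_calendar/__init__.py and the
# constants below are placeholders, not the integration's actual defaults.
NYC_LATITUDE = 40.7128
NYC_LONGITUDE = -74.0060
CANDLE_LIGHT_DEFAULT = 18  # assumed minutes before sunset


def make_nyc_test_params(dtime, results, havdalah_offset=0):
    """Return (now, candle_lighting, havdalah, diaspora, tzname, lat, lon, result)."""
    return (
        dtime,
        CANDLE_LIGHT_DEFAULT,
        havdalah_offset,
        True,  # diaspora
        "America/New_York",
        NYC_LATITUDE,
        NYC_LONGITUDE,
        results,
    )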
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/bond/fan.py
"""Support for Netgear LTE binary sensors.""" from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity from homeassistant.exceptions import PlatformNotReady from . import CONF_MONITORED_CONDITIONS, DATA_KEY, LTEEntity from .sensor_types import BINARY_SENSOR_CLASSES async def async_setup_platform(hass, config, async_add_entities, discovery_info): """Set up Netgear LTE binary sensor devices.""" if discovery_info is None: return modem_data = hass.data[DATA_KEY].get_modem_data(discovery_info) if not modem_data or not modem_data.data: raise PlatformNotReady binary_sensor_conf = discovery_info[DOMAIN] monitored_conditions = binary_sensor_conf[CONF_MONITORED_CONDITIONS] binary_sensors = [] for sensor_type in monitored_conditions: binary_sensors.append(LTEBinarySensor(modem_data, sensor_type)) async_add_entities(binary_sensors) class LTEBinarySensor(LTEEntity, BinarySensorEntity): """Netgear LTE binary sensor entity.""" @property def is_on(self): """Return true if the binary sensor is on.""" return getattr(self.modem_data.data, self.sensor_type) @property def device_class(self): """Return the class of binary sensor.""" return BINARY_SENSOR_CLASSES[self.sensor_type]
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/netgear_lte/binary_sensor.py
"""Event parser and human readable log generator.""" from datetime import timedelta from itertools import groupby import json import re import sqlalchemy from sqlalchemy.orm import aliased from sqlalchemy.sql.expression import literal import voluptuous as vol from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf from homeassistant.components.http import HomeAssistantView from homeassistant.components.recorder.models import ( Events, States, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.util import session_scope from homeassistant.components.script import EVENT_SCRIPT_STARTED from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_NAME, ATTR_SERVICE, EVENT_CALL_SERVICE, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_LOGBOOK_ENTRY, EVENT_STATE_CHANGED, HTTP_BAD_REQUEST, ) from homeassistant.core import DOMAIN as HA_DOMAIN, callback, split_entity_id from homeassistant.exceptions import InvalidEntityFormatError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entityfilter import ( INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, convert_include_exclude_filter, generate_filter, ) from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util ENTITY_ID_JSON_TEMPLATE = '"entity_id": "{}"' ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": "([^"]+)"') DOMAIN_JSON_EXTRACT = re.compile('"domain": "([^"]+)"') ICON_JSON_EXTRACT = re.compile('"icon": "([^"]+)"') ATTR_MESSAGE = "message" CONTINUOUS_DOMAINS = ["proximity", "sensor"] DOMAIN = "logbook" GROUP_BY_MINUTES = 15 EMPTY_JSON_OBJECT = "{}" UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":' HA_DOMAIN_ENTITY_ID = f"{HA_DOMAIN}." 
CONFIG_SCHEMA = vol.Schema( {DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA ) HOMEASSISTANT_EVENTS = [ EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ] ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = [ EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE, *HOMEASSISTANT_EVENTS, ] ALL_EVENT_TYPES = [ EVENT_STATE_CHANGED, *ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, ] EVENT_COLUMNS = [ Events.event_type, Events.event_data, Events.time_fired, Events.context_id, Events.context_user_id, Events.context_parent_id, ] SCRIPT_AUTOMATION_EVENTS = [EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED] LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_MESSAGE): cv.template, vol.Optional(ATTR_DOMAIN): cv.slug, vol.Optional(ATTR_ENTITY_ID): cv.entity_id, } ) @bind_hass def log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" hass.add_job(async_log_entry, hass, name, message, domain, entity_id, context) @bind_hass def async_log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" data = {ATTR_NAME: name, ATTR_MESSAGE: message} if domain is not None: data[ATTR_DOMAIN] = domain if entity_id is not None: data[ATTR_ENTITY_ID] = entity_id hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data, context=context) async def async_setup(hass, config): """Logbook setup.""" hass.data[DOMAIN] = {} @callback def log_message(service): """Handle sending notification message service calls.""" message = service.data[ATTR_MESSAGE] name = service.data[ATTR_NAME] domain = service.data.get(ATTR_DOMAIN) entity_id = service.data.get(ATTR_ENTITY_ID) if entity_id is None and domain is None: # If there is no entity_id or # domain, the event will get filtered # away so we use the "logbook" domain domain = DOMAIN message.hass = hass message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id) hass.components.frontend.async_register_built_in_panel( "logbook", "logbook", "hass:format-list-bulleted-type" ) conf = config.get(DOMAIN, {}) if conf: filters = sqlalchemy_filter_from_include_exclude_conf(conf) entities_filter = convert_include_exclude_filter(conf) else: filters = None entities_filter = None hass.http.register_view(LogbookView(conf, filters, entities_filter)) hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA) await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform) return True async def _process_logbook_platform(hass, domain, platform): """Process a logbook platform.""" @callback def _async_describe_event(domain, event_name, describe_callback): """Teach logbook how to describe a new event.""" hass.data[DOMAIN][event_name] = (domain, describe_callback) platform.async_describe_events(hass, _async_describe_event) class LogbookView(HomeAssistantView): """Handle logbook view requests.""" url = "/api/logbook" name = "api:logbook" extra_urls = ["/api/logbook/{datetime}"] def __init__(self, config, filters, entities_filter): """Initialize the logbook view.""" self.config = config self.filters = filters self.entities_filter = entities_filter async def get(self, request, datetime=None): """Retrieve logbook entries.""" if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) else: datetime = dt_util.start_of_local_day() period = request.query.get("period") if period is None: period = 1 else: period = int(period) 
entity_ids = request.query.get("entity") if entity_ids: try: entity_ids = cv.entity_ids(entity_ids) except vol.Invalid: raise InvalidEntityFormatError( f"Invalid entity id(s) encountered: {entity_ids}. " "Format should be <domain>.<object_id>" ) from vol.Invalid end_time = request.query.get("end_time") if end_time is None: start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1) end_day = start_day + timedelta(days=period) else: start_day = datetime end_day = dt_util.parse_datetime(end_time) if end_day is None: return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) hass = request.app["hass"] entity_matches_only = "entity_matches_only" in request.query def json_events(): """Fetch events and generate JSON.""" return self.json( _get_events( hass, start_day, end_day, entity_ids, self.filters, self.entities_filter, entity_matches_only, ) ) return await hass.async_add_executor_job(json_events) def humanify(hass, events, entity_attr_cache, context_lookup): """Generate a converted list of events into Entry objects. Will try to group events if possible: - if 2+ sensor updates in GROUP_BY_MINUTES, show last - if Home Assistant stop and start happen in same minute call it restarted """ external_events = hass.data.get(DOMAIN, {}) # Group events in batches of GROUP_BY_MINUTES for _, g_events in groupby( events, lambda event: event.time_fired_minute // GROUP_BY_MINUTES ): events_batch = list(g_events) # Keep track of last sensor states last_sensor_event = {} # Group HA start/stop events # Maps minute of event to 1: stop, 2: stop + start start_stop_events = {} # Process events for event in events_batch: if event.event_type == EVENT_STATE_CHANGED: if event.domain in CONTINUOUS_DOMAINS: last_sensor_event[event.entity_id] = event elif event.event_type == EVENT_HOMEASSISTANT_STOP: if event.time_fired_minute in start_stop_events: continue start_stop_events[event.time_fired_minute] = 1 elif event.event_type == EVENT_HOMEASSISTANT_START: if event.time_fired_minute not in start_stop_events: continue start_stop_events[event.time_fired_minute] = 2 # Yield entries for event in events_batch: if event.event_type == EVENT_STATE_CHANGED: entity_id = event.entity_id domain = event.domain if ( domain in CONTINUOUS_DOMAINS and event != last_sensor_event[entity_id] ): # Skip all but the last sensor state continue data = { "when": event.time_fired_isoformat, "name": _entity_name_from_event( entity_id, event, entity_attr_cache ), "state": event.state, "entity_id": entity_id, } icon = event.attributes_icon if icon: data["icon"] = icon if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events, ) yield data elif event.event_type in external_events: domain, describe_event = external_events[event.event_type] data = describe_event(event) data["when"] = event.time_fired_isoformat data["domain"] = domain if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, data.get(ATTR_ENTITY_ID), event, context_lookup, entity_attr_cache, external_events, ) yield data elif event.event_type == EVENT_HOMEASSISTANT_START: if start_stop_events.get(event.time_fired_minute) == 2: continue yield { "when": event.time_fired_isoformat, "name": "Home Assistant", "message": "started", "domain": HA_DOMAIN, } elif event.event_type == EVENT_HOMEASSISTANT_STOP: if start_stop_events.get(event.time_fired_minute) == 2: action = "restarted" else: action = "stopped" yield { "when": 
event.time_fired_isoformat, "name": "Home Assistant", "message": action, "domain": HA_DOMAIN, } elif event.event_type == EVENT_LOGBOOK_ENTRY: event_data = event.data domain = event_data.get(ATTR_DOMAIN) entity_id = event_data.get(ATTR_ENTITY_ID) if domain is None and entity_id is not None: try: domain = split_entity_id(str(entity_id))[0] except IndexError: pass data = { "when": event.time_fired_isoformat, "name": event_data.get(ATTR_NAME), "message": event_data.get(ATTR_MESSAGE), "domain": domain, "entity_id": entity_id, } if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events, ) yield data def _get_events( hass, start_day, end_day, entity_ids=None, filters=None, entities_filter=None, entity_matches_only=False, ): """Get events for a period of time.""" entity_attr_cache = EntityAttributeCache(hass) context_lookup = {None: None} def yield_events(query): """Yield Events that are not filtered away.""" for row in query.yield_per(1000): event = LazyEventPartialState(row) context_lookup.setdefault(event.context_id, event) if event.event_type == EVENT_CALL_SERVICE: continue if event.event_type == EVENT_STATE_CHANGED or _keep_event( hass, event, entities_filter ): yield event if entity_ids is not None: entities_filter = generate_filter([], entity_ids, [], []) with session_scope(hass=hass) as session: old_state = aliased(States, name="old_state") if entity_ids is not None: query = _generate_events_query_without_states(session) query = _apply_event_time_filter(query, start_day, end_day) query = _apply_event_types_filter( hass, query, ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED ) if entity_matches_only: # When entity_matches_only is provided, contexts and events that do not # contain the entity_ids are not included in the logbook response. 
query = _apply_event_entity_id_matchers(query, entity_ids) query = query.union_all( _generate_states_query( session, start_day, end_day, old_state, entity_ids ) ) else: query = _generate_events_query(session) query = _apply_event_time_filter(query, start_day, end_day) query = _apply_events_types_and_states_filter( hass, query, old_state ).filter( (States.last_updated == States.last_changed) | (Events.event_type != EVENT_STATE_CHANGED) ) if filters: query = query.filter( filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED) ) query = query.order_by(Events.time_fired) return list( humanify(hass, yield_events(query), entity_attr_cache, context_lookup) ) def _generate_events_query(session): return session.query( *EVENT_COLUMNS, States.state, States.entity_id, States.domain, States.attributes, ) def _generate_events_query_without_states(session): return session.query( *EVENT_COLUMNS, literal(None).label("state"), literal(None).label("entity_id"), literal(None).label("domain"), literal(None).label("attributes"), ) def _generate_states_query(session, start_day, end_day, old_state, entity_ids): return ( _generate_events_query(session) .outerjoin(Events, (States.event_id == Events.event_id)) .outerjoin(old_state, (States.old_state_id == old_state.state_id)) .filter(_missing_state_matcher(old_state)) .filter(_continuous_entity_matcher()) .filter((States.last_updated > start_day) & (States.last_updated < end_day)) .filter( (States.last_updated == States.last_changed) & States.entity_id.in_(entity_ids) ) ) def _apply_events_types_and_states_filter(hass, query, old_state): events_query = ( query.outerjoin(States, (Events.event_id == States.event_id)) .outerjoin(old_state, (States.old_state_id == old_state.state_id)) .filter( (Events.event_type != EVENT_STATE_CHANGED) | _missing_state_matcher(old_state) ) .filter( (Events.event_type != EVENT_STATE_CHANGED) | _continuous_entity_matcher() ) ) return _apply_event_types_filter(hass, events_query, ALL_EVENT_TYPES) def _missing_state_matcher(old_state): # The below removes state change events that do not have # and old_state or the old_state is missing (newly added entities) # or the new_state is missing (removed entities) return sqlalchemy.and_( old_state.state_id.isnot(None), (States.state != old_state.state), States.state.isnot(None), ) def _continuous_entity_matcher(): # # Prefilter out continuous domains that have # ATTR_UNIT_OF_MEASUREMENT as its much faster in sql. # return sqlalchemy.or_( sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS)), sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON)), ) def _apply_event_time_filter(events_query, start_day, end_day): return events_query.filter( (Events.time_fired > start_day) & (Events.time_fired < end_day) ) def _apply_event_types_filter(hass, query, event_types): return query.filter( Events.event_type.in_(event_types + list(hass.data.get(DOMAIN, {}))) ) def _apply_event_entity_id_matchers(events_query, entity_ids): return events_query.filter( sqlalchemy.or_( *[ Events.event_data.contains(ENTITY_ID_JSON_TEMPLATE.format(entity_id)) for entity_id in entity_ids ] ) ) def _keep_event(hass, event, entities_filter): if event.event_type in HOMEASSISTANT_EVENTS: return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID) entity_id = event.data_entity_id if entity_id: return entities_filter is None or entities_filter(entity_id) if event.event_type in hass.data[DOMAIN]: # If the entity_id isn't described, use the domain that describes # the event for filtering. 
domain = hass.data[DOMAIN][event.event_type][0] else: domain = event.data_domain if domain is None: return False return entities_filter is None or entities_filter(f"{domain}.") def _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events ): context_event = context_lookup.get(event.context_id) if not context_event: return if event == context_event: # This is the first event with the given ID. Was it directly caused by # a parent event? if event.context_parent_id: context_event = context_lookup.get(event.context_parent_id) # Ensure the (parent) context_event exists and is not the root cause of # this log entry. if not context_event or event == context_event: return event_type = context_event.event_type context_entity_id = context_event.entity_id # State change if context_entity_id: data["context_entity_id"] = context_entity_id data["context_entity_id_name"] = _entity_name_from_event( context_entity_id, context_event, entity_attr_cache ) data["context_event_type"] = event_type return event_data = context_event.data # Call service if event_type == EVENT_CALL_SERVICE: event_data = context_event.data data["context_domain"] = event_data.get(ATTR_DOMAIN) data["context_service"] = event_data.get(ATTR_SERVICE) data["context_event_type"] = event_type return if not entity_id: return attr_entity_id = event_data.get(ATTR_ENTITY_ID) if not attr_entity_id or ( event_type in SCRIPT_AUTOMATION_EVENTS and attr_entity_id == entity_id ): return if context_event == event: return data["context_entity_id"] = attr_entity_id data["context_entity_id_name"] = _entity_name_from_event( attr_entity_id, context_event, entity_attr_cache ) data["context_event_type"] = event_type if event_type in external_events: domain, describe_event = external_events[event_type] data["context_domain"] = domain name = describe_event(context_event).get(ATTR_NAME) if name: data["context_name"] = name def _entity_name_from_event(entity_id, event, entity_attr_cache): """Extract the entity name from the event using the cache if possible.""" return entity_attr_cache.get( entity_id, ATTR_FRIENDLY_NAME, event ) or split_entity_id(entity_id)[1].replace("_", " ") class LazyEventPartialState: """A lazy version of core Event with limited State joined in.""" __slots__ = [ "_row", "_event_data", "_time_fired_isoformat", "_attributes", "event_type", "entity_id", "state", "domain", "context_id", "context_user_id", "context_parent_id", "time_fired_minute", ] def __init__(self, row): """Init the lazy event.""" self._row = row self._event_data = None self._time_fired_isoformat = None self._attributes = None self.event_type = self._row.event_type self.entity_id = self._row.entity_id self.state = self._row.state self.domain = self._row.domain self.context_id = self._row.context_id self.context_user_id = self._row.context_user_id self.context_parent_id = self._row.context_parent_id self.time_fired_minute = self._row.time_fired.minute @property def attributes_icon(self): """Extract the icon from the decoded attributes or json.""" if self._attributes: return self._attributes.get(ATTR_ICON) result = ICON_JSON_EXTRACT.search(self._row.attributes) return result and result.group(1) @property def data_entity_id(self): """Extract the entity id from the decoded data or json.""" if self._event_data: return self._event_data.get(ATTR_ENTITY_ID) result = ENTITY_ID_JSON_EXTRACT.search(self._row.event_data) return result and result.group(1) @property def data_domain(self): """Extract the domain from the decoded data or json.""" 
if self._event_data: return self._event_data.get(ATTR_DOMAIN) result = DOMAIN_JSON_EXTRACT.search(self._row.event_data) return result and result.group(1) @property def attributes(self): """State attributes.""" if not self._attributes: if ( self._row.attributes is None or self._row.attributes == EMPTY_JSON_OBJECT ): self._attributes = {} else: self._attributes = json.loads(self._row.attributes) return self._attributes @property def data(self): """Event data.""" if not self._event_data: if self._row.event_data == EMPTY_JSON_OBJECT: self._event_data = {} else: self._event_data = json.loads(self._row.event_data) return self._event_data @property def time_fired_isoformat(self): """Time event was fired in utc isoformat.""" if not self._time_fired_isoformat: self._time_fired_isoformat = process_timestamp_to_utc_isoformat( self._row.time_fired or dt_util.utcnow() ) return self._time_fired_isoformat class EntityAttributeCache: """A cache to lookup static entity_id attribute. This class should not be used to lookup attributes that are expected to change state. """ def __init__(self, hass): """Init the cache.""" self._hass = hass self._cache = {} def get(self, entity_id, attribute, event): """Lookup an attribute for an entity or get it from the cache.""" if entity_id in self._cache: if attribute in self._cache[entity_id]: return self._cache[entity_id][attribute] else: self._cache[entity_id] = {} current_state = self._hass.states.get(entity_id) if current_state: # Try the current state as its faster than decoding the # attributes self._cache[entity_id][attribute] = current_state.attributes.get(attribute) else: # If the entity has been removed, decode the attributes # instead self._cache[entity_id][attribute] = event.attributes.get(attribute) return self._cache[entity_id][attribute]
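The humanify generator above batches events into GROUP_BY_MINUTES windows so that repeated sensor updates collapse to the last state and a stop/start pair in the same minute reads as a restart. The windowing itself is just integer division over the fired minute; a standalone illustration of that grouping, with plain tuples standing in for the lazy event rows, follows:

# Standalone illustration of the GROUP_BY_MINUTES batching used by humanify();
# the tuples are stand-ins for LazyEventPartialState rows and assume events
# arrive ordered by time_fired, as the logbook query guarantees.
from itertools import groupby

GROUP_BY_MINUTES = 15

events = [
    (2, "state_changed"),
    (9, "state_changed"),
    (31, "homeassistant_stop"),
    (31, "homeassistant_start"),
]

for window, batch in groupby(events, key=lambda event: event[0] // GROUP_BY_MINUTES):
    # Window 0 holds both sensor updates, window 2 holds the stop/start pair,
    # mirroring how humanify() collapses each batch before yielding entries.
    print(window, list(batch))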
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/logbook/__init__.py
"""Support for Envisalink-based alarm control panels (Honeywell/DSC).""" import logging import voluptuous as vol from homeassistant.components.alarm_control_panel import ( FORMAT_NUMBER, AlarmControlPanelEntity, ) from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, SUPPORT_ALARM_ARM_NIGHT, SUPPORT_ALARM_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ( CONF_PANIC, CONF_PARTITIONNAME, DATA_EVL, DOMAIN, PARTITION_SCHEMA, SIGNAL_KEYPAD_UPDATE, SIGNAL_PARTITION_UPDATE, EnvisalinkDevice, ) _LOGGER = logging.getLogger(__name__) SERVICE_ALARM_KEYPRESS = "alarm_keypress" ATTR_KEYPRESS = "keypress" ALARM_KEYPRESS_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_KEYPRESS): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for Envisalink alarm panels.""" configured_partitions = discovery_info["partitions"] code = discovery_info[CONF_CODE] panic_type = discovery_info[CONF_PANIC] devices = [] for part_num in configured_partitions: device_config_data = PARTITION_SCHEMA(configured_partitions[part_num]) device = EnvisalinkAlarm( hass, part_num, device_config_data[CONF_PARTITIONNAME], code, panic_type, hass.data[DATA_EVL].alarm_state["partition"][part_num], hass.data[DATA_EVL], ) devices.append(device) async_add_entities(devices) @callback def alarm_keypress_handler(service): """Map services to methods on Alarm.""" entity_ids = service.data.get(ATTR_ENTITY_ID) keypress = service.data.get(ATTR_KEYPRESS) target_devices = [ device for device in devices if device.entity_id in entity_ids ] for device in target_devices: device.async_alarm_keypress(keypress) hass.services.async_register( DOMAIN, SERVICE_ALARM_KEYPRESS, alarm_keypress_handler, schema=ALARM_KEYPRESS_SCHEMA, ) return True class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): """Representation of an Envisalink-based alarm panel.""" def __init__( self, hass, partition_number, alarm_name, code, panic_type, info, controller ): """Initialize the alarm panel.""" self._partition_number = partition_number self._code = code self._panic_type = panic_type _LOGGER.debug("Setting up alarm: %s", alarm_name) super().__init__(alarm_name, info, controller) async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_KEYPAD_UPDATE, self._update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_PARTITION_UPDATE, self._update_callback ) ) @callback def _update_callback(self, partition): """Update Home Assistant state, if needed.""" if partition is None or int(partition) == self._partition_number: self.async_write_ha_state() @property def code_format(self): """Regex for code format or None if no code is required.""" if self._code: return None return FORMAT_NUMBER @property def state(self): """Return the state of the device.""" state = STATE_UNKNOWN if self._info["status"]["alarm"]: state = STATE_ALARM_TRIGGERED elif self._info["status"]["armed_zero_entry_delay"]: state = STATE_ALARM_ARMED_NIGHT elif self._info["status"]["armed_away"]: state = 
STATE_ALARM_ARMED_AWAY elif self._info["status"]["armed_stay"]: state = STATE_ALARM_ARMED_HOME elif self._info["status"]["exit_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["entry_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["alpha"]: state = STATE_ALARM_DISARMED return state @property def supported_features(self) -> int: """Return the list of supported features.""" return ( SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_TRIGGER ) async def async_alarm_disarm(self, code=None): """Send disarm command.""" if code: self.hass.data[DATA_EVL].disarm_partition(str(code), self._partition_number) else: self.hass.data[DATA_EVL].disarm_partition( str(self._code), self._partition_number ) async def async_alarm_arm_home(self, code=None): """Send arm home command.""" if code: self.hass.data[DATA_EVL].arm_stay_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_stay_partition( str(self._code), self._partition_number ) async def async_alarm_arm_away(self, code=None): """Send arm away command.""" if code: self.hass.data[DATA_EVL].arm_away_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_away_partition( str(self._code), self._partition_number ) async def async_alarm_trigger(self, code=None): """Alarm trigger command. Will be used to trigger a panic alarm.""" self.hass.data[DATA_EVL].panic_alarm(self._panic_type) async def async_alarm_arm_night(self, code=None): """Send arm night command.""" self.hass.data[DATA_EVL].arm_night_partition( str(code) if code else str(self._code), self._partition_number ) @callback def async_alarm_keypress(self, keypress=None): """Send custom keypress.""" if keypress: self.hass.data[DATA_EVL].keypresses_to_partition( self._partition_number, keypress )
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/envisalink/alarm_control_panel.py
"""Support for file notification.""" import os import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import CONF_FILENAME import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util CONF_TIMESTAMP = "timestamp" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_FILENAME): cv.string, vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, } ) def get_service(hass, config, discovery_info=None): """Get the file notification service.""" filename = config[CONF_FILENAME] timestamp = config[CONF_TIMESTAMP] return FileNotificationService(hass, filename, timestamp) class FileNotificationService(BaseNotificationService): """Implement the notification service for the File service.""" def __init__(self, hass, filename, add_timestamp): """Initialize the service.""" self.filepath = os.path.join(hass.config.config_dir, filename) self.add_timestamp = add_timestamp def send_message(self, message="", **kwargs): """Send a message to a file.""" with open(self.filepath, "a") as file: if os.stat(self.filepath).st_size == 0: title = f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" file.write(title) if self.add_timestamp: text = f"{dt_util.utcnow().isoformat()} {message}\n" else: text = f"{message}\n" file.write(text)
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/file/notify.py
"""Config flow for Islamic Prayer Times integration.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.core import callback # pylint: disable=unused-import from .const import CALC_METHODS, CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN, NAME class IslamicPrayerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle the Islamic Prayer config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return IslamicPrayerOptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") if user_input is None: return self.async_show_form(step_id="user") return self.async_create_entry(title=NAME, data=user_input) async def async_step_import(self, import_config): """Import from config.""" return await self.async_step_user(user_input=import_config) class IslamicPrayerOptionsFlowHandler(config_entries.OptionsFlow): """Handle Islamic Prayer client options.""" def __init__(self, config_entry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = { vol.Optional( CONF_CALC_METHOD, default=self.config_entry.options.get( CONF_CALC_METHOD, DEFAULT_CALC_METHOD ), ): vol.In(CALC_METHODS) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/islamic_prayer_times/config_flow.py
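The parametrized Shabbat tests above unpack each SHABBAT_PARAMS entry as (now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result), so the make_nyc_test_params / make_jerusalem_test_params helpers imported from the package's __init__.py must return tuples in that order. Below is a minimal sketch of such a helper for the NYC case; the CANDLE_LIGHT_DEFAULT value and the localization of expected datetimes are assumptions about the real fixture, not a copy of it.

# Hedged sketch of a params helper matching the parametrize order used above.
# CANDLE_LIGHT_DEFAULT and the localization step are assumed, not taken from the real fixture.
from datetime import datetime

import homeassistant.util.dt as dt_util

CANDLE_LIGHT_DEFAULT = 18  # assumed minutes-before-sunset default


def make_nyc_test_params(dtime, results, havdalah_offset=0):
    """Return (now, candle_lighting, havdalah, diaspora, tz, lat, lon, result) for NYC."""
    time_zone = dt_util.get_time_zone("America/New_York")
    if isinstance(results, dict):
        # Expected datetimes are given as naive values; localize them for comparison.
        results = {
            key: time_zone.localize(value) if isinstance(value, datetime) else value
            for key, value in results.items()
        }
    return (
        dtime,
        CANDLE_LIGHT_DEFAULT,
        havdalah_offset,
        True,  # diaspora
        "America/New_York",
        40.7128,
        -74.0060,
        results,
    )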
"""ONVIF event abstraction.""" import asyncio import datetime as dt from typing import Callable, Dict, List, Optional, Set from httpx import RemoteProtocolError, TransportError from onvif import ONVIFCamera, ONVIFService from zeep.exceptions import Fault from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback from homeassistant.helpers.event import async_call_later from homeassistant.util import dt as dt_util from .const import LOGGER from .models import Event from .parsers import PARSERS UNHANDLED_TOPICS = set() SUBSCRIPTION_ERRORS = ( Fault, asyncio.TimeoutError, TransportError, ) class EventManager: """ONVIF Event Manager.""" def __init__(self, hass: HomeAssistant, device: ONVIFCamera, unique_id: str): """Initialize event manager.""" self.hass: HomeAssistant = hass self.device: ONVIFCamera = device self.unique_id: str = unique_id self.started: bool = False self._subscription: ONVIFService = None self._events: Dict[str, Event] = {} self._listeners: List[CALLBACK_TYPE] = [] self._unsub_refresh: Optional[CALLBACK_TYPE] = None super().__init__() @property def platforms(self) -> Set[str]: """Return platforms to setup.""" return {event.platform for event in self._events.values()} @callback def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]: """Listen for data updates.""" # This is the first listener, set up polling. if not self._listeners: self.async_schedule_pull() self._listeners.append(update_callback) @callback def remove_listener() -> None: """Remove update listener.""" self.async_remove_listener(update_callback) return remove_listener @callback def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None: """Remove data update.""" if update_callback in self._listeners: self._listeners.remove(update_callback) if not self._listeners and self._unsub_refresh: self._unsub_refresh() self._unsub_refresh = None async def async_start(self) -> bool: """Start polling events.""" if await self.device.create_pullpoint_subscription(): # Create subscription manager self._subscription = self.device.create_subscription_service( "PullPointSubscription" ) # Renew immediately await self.async_renew() # Initialize events pullpoint = self.device.create_pullpoint_service() try: await pullpoint.SetSynchronizationPoint() except SUBSCRIPTION_ERRORS: pass response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=5)} ) # Parse event initialization await self.async_parse_messages(response.NotificationMessage) self.started = True return True return False async def async_stop(self) -> None: """Unsubscribe from events.""" self._listeners = [] self.started = False if not self._subscription: return await self._subscription.Unsubscribe() self._subscription = None async def async_restart(self, _now: dt = None) -> None: """Restart the subscription assuming the camera rebooted.""" if not self.started: return if self._subscription: try: await self._subscription.Unsubscribe() except SUBSCRIPTION_ERRORS: pass # Ignored. The subscription may no longer exist. self._subscription = None try: restarted = await self.async_start() except SUBSCRIPTION_ERRORS: restarted = False if not restarted: LOGGER.warning( "Failed to restart ONVIF PullPoint subscription for '%s'. 
Retrying...", self.unique_id, ) # Try again in a minute self._unsub_refresh = async_call_later(self.hass, 60, self.async_restart) elif self._listeners: LOGGER.debug( "Restarted ONVIF PullPoint subscription for '%s'", self.unique_id ) self.async_schedule_pull() async def async_renew(self) -> None: """Renew subscription.""" if not self._subscription: return termination_time = ( (dt_util.utcnow() + dt.timedelta(days=1)) .isoformat(timespec="seconds") .replace("+00:00", "Z") ) await self._subscription.Renew(termination_time) def async_schedule_pull(self) -> None: """Schedule async_pull_messages to run.""" self._unsub_refresh = async_call_later(self.hass, 1, self.async_pull_messages) async def async_pull_messages(self, _now: dt = None) -> None: """Pull messages from device.""" if self.hass.state == CoreState.running: try: pullpoint = self.device.create_pullpoint_service() response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=60)} ) # Renew subscription if less than two hours is left if ( dt_util.as_utc(response.TerminationTime) - dt_util.utcnow() ).total_seconds() < 7200: await self.async_renew() except RemoteProtocolError: # Likley a shutdown event, nothing to see here return except SUBSCRIPTION_ERRORS as err: LOGGER.warning( "Failed to fetch ONVIF PullPoint subscription messages for '%s': %s", self.unique_id, err, ) # Treat errors as if the camera restarted. Assume that the pullpoint # subscription is no longer valid. self._unsub_refresh = None await self.async_restart() return # Parse response await self.async_parse_messages(response.NotificationMessage) # Update entities for update_callback in self._listeners: update_callback() # Reschedule another pull if self._listeners: self.async_schedule_pull() # pylint: disable=protected-access async def async_parse_messages(self, messages) -> None: """Parse notification message.""" for msg in messages: # Guard against empty message if not msg.Topic: continue topic = msg.Topic._value_1 parser = PARSERS.get(topic) if not parser: if topic not in UNHANDLED_TOPICS: LOGGER.info( "No registered handler for event from %s: %s", self.unique_id, msg, ) UNHANDLED_TOPICS.add(topic) continue event = await parser(self.unique_id, msg) if not event: LOGGER.warning("Unable to parse event from %s: %s", self.unique_id, msg) return self._events[event.uid] = event def get_uid(self, uid) -> Event: """Retrieve event for given id.""" return self._events[uid] def get_platform(self, platform) -> List[Event]: """Retrieve events for given platform.""" return [event for event in self._events.values() if event.platform == platform]
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/onvif/event.py
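As a usage illustration of the EventManager above: a consumer starts the PullPoint subscription with async_start and registers a listener with async_add_listener, which returns an unsubscribe callable and keeps the polling loop scheduled while listeners exist. The snippet below is a hypothetical consumer; obtaining the ONVIFCamera and unique_id is assumed and not shown.

# Hypothetical consumer of the EventManager above (camera/unique_id wiring is assumed).
async def watch_onvif_events(hass, camera, unique_id):
    manager = EventManager(hass, camera, unique_id)

    if not await manager.async_start():
        LOGGER.warning("Could not create a PullPoint subscription for %s", unique_id)
        return None

    @callback
    def updated():
        # Invoked after every successful PullMessages cycle.
        for event in manager.get_platform("binary_sensor"):
            LOGGER.debug("ONVIF event %s refreshed for %s", event.uid, unique_id)

    # Returns a callable that removes the listener and stops rescheduling pulls.
    return manager.async_add_listener(updated)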
"""Support for USCIS Case Status.""" from datetime import timedelta import logging import uscisstatus import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "USCIS" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required("case_id"): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the platform in Home Assistant and Case Information.""" uscis = UscisSensor(config["case_id"], config[CONF_NAME]) uscis.update() if uscis.valid_case_id: add_entities([uscis]) else: _LOGGER.error("Setup USCIS Sensor Fail check if your Case ID is Valid") class UscisSensor(Entity): """USCIS Sensor will check case status on daily basis.""" MIN_TIME_BETWEEN_UPDATES = timedelta(hours=24) CURRENT_STATUS = "current_status" LAST_CASE_UPDATE = "last_update_date" def __init__(self, case, name): """Initialize the sensor.""" self._state = None self._case_id = case self._attributes = None self.valid_case_id = None self._name = name @property def name(self): """Return the name.""" return self._name @property def state(self): """Return the state.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Fetch data from the USCIS website and update state attributes.""" try: status = uscisstatus.get_case_status(self._case_id) self._attributes = {self.CURRENT_STATUS: status["status"]} self._state = status["date"] self.valid_case_id = True except ValueError: _LOGGER("Please Check that you have valid USCIS case id") self.valid_case_id = False
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/uscis/sensor.py
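The only external call in UscisSensor.update is uscisstatus.get_case_status, so its behavior can be illustrated without hitting the USCIS site by stubbing that function. The case ID and the returned payload below are made-up example values; this is a standalone sanity check, not part of the integration's test suite.

# Standalone sanity check of UscisSensor.update with uscisstatus stubbed out;
# the case ID and the returned payload are illustrative values only.
from unittest.mock import patch

from homeassistant.components.uscis.sensor import UscisSensor

with patch(
    "uscisstatus.get_case_status",
    return_value={"status": "Case Was Received", "date": "2020-01-01"},
):
    sensor = UscisSensor("ABC1234567890", "USCIS")
    sensor.update()

assert sensor.valid_case_id
assert sensor.state == "2020-01-01"
assert sensor.device_state_attributes == {"current_status": "Case Was Received"}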
"""Config flow for Rollease Acmeda Automate Pulse Hub.""" import asyncio from typing import Dict, Optional import aiopulse import async_timeout import voluptuous as vol from homeassistant import config_entries from .const import DOMAIN # pylint: disable=unused-import class AcmedaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Acmeda config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.discovered_hubs: Optional[Dict[str, aiopulse.Hub]] = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if ( user_input is not None and self.discovered_hubs is not None and user_input["id"] in self.discovered_hubs ): return await self.async_create(self.discovered_hubs[user_input["id"]]) # Already configured hosts already_configured = { entry.unique_id for entry in self._async_current_entries() } hubs = [] try: with async_timeout.timeout(5): async for hub in aiopulse.Hub.discover(): if hub.id not in already_configured: hubs.append(hub) except asyncio.TimeoutError: pass if len(hubs) == 0: return self.async_abort(reason="no_devices_found") if len(hubs) == 1: return await self.async_create(hubs[0]) self.discovered_hubs = {hub.id: hub for hub in hubs} return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required("id"): vol.In( {hub.id: f"{hub.id} {hub.host}" for hub in hubs} ) } ), ) async def async_create(self, hub): """Create the Acmeda Hub entry.""" await self.async_set_unique_id(hub.id, raise_on_progress=False) return self.async_create_entry(title=hub.id, data={"host": hub.host})
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/acmeda/config_flow.py
"""A sensor platform that give you information about the next space launch.""" from datetime import timedelta import logging from typing import Optional from pylaunches import PyLaunches, PyLaunchesException import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .const import ( ATTR_AGENCY, ATTR_AGENCY_COUNTRY_CODE, ATTR_LAUNCH_TIME, ATTR_STREAM, ATTRIBUTION, DEFAULT_NAME, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=1) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Create the launch sensor.""" name = config[CONF_NAME] session = async_get_clientsession(hass) launches = PyLaunches(session) async_add_entities([LaunchLibrarySensor(launches, name)], True) class LaunchLibrarySensor(Entity): """Representation of a launch_library Sensor.""" def __init__(self, launches: PyLaunches, name: str) -> None: """Initialize the sensor.""" self.launches = launches self.next_launch = None self._name = name async def async_update(self) -> None: """Get the latest data.""" try: launches = await self.launches.upcoming_launches() except PyLaunchesException as exception: _LOGGER.error("Error getting data, %s", exception) else: if launches: self.next_launch = launches[0] @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> Optional[str]: """Return the state of the sensor.""" if self.next_launch: return self.next_launch.name return None @property def icon(self) -> str: """Return the icon of the sensor.""" return "mdi:rocket" @property def device_state_attributes(self) -> Optional[dict]: """Return attributes for the sensor.""" if self.next_launch: return { ATTR_LAUNCH_TIME: self.next_launch.net, ATTR_AGENCY: self.next_launch.launch_service_provider.name, ATTR_AGENCY_COUNTRY_CODE: self.next_launch.pad.location.country_code, ATTR_STREAM: self.next_launch.webcast_live, ATTR_ATTRIBUTION: ATTRIBUTION, } return None
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/launch_library/sensor.py
"""Reproduce an Timer state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import ( ATTR_DURATION, DOMAIN, SERVICE_CANCEL, SERVICE_PAUSE, SERVICE_START, STATUS_ACTIVE, STATUS_IDLE, STATUS_PAUSED, ) _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATUS_IDLE, STATUS_ACTIVE, STATUS_PAUSED} async def _async_reproduce_state( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in VALID_STATES: _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # Return if we are already at the right state. if cur_state.state == state.state and cur_state.attributes.get( ATTR_DURATION ) == state.attributes.get(ATTR_DURATION): return service_data = {ATTR_ENTITY_ID: state.entity_id} if state.state == STATUS_ACTIVE: service = SERVICE_START if ATTR_DURATION in state.attributes: service_data[ATTR_DURATION] = state.attributes[ATTR_DURATION] elif state.state == STATUS_PAUSED: service = SERVICE_PAUSE elif state.state == STATUS_IDLE: service = SERVICE_CANCEL await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce Timer states.""" await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/timer/reproduce_state.py
"""Support for Neato sensors.""" from datetime import timedelta import logging from pybotvac.exceptions import NeatoRobotException from homeassistant.components.sensor import DEVICE_CLASS_BATTERY from homeassistant.const import PERCENTAGE from homeassistant.helpers.entity import Entity from .const import NEATO_DOMAIN, NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES) BATTERY = "Battery" async def async_setup_entry(hass, entry, async_add_entities): """Set up the Neato sensor using config entry.""" dev = [] neato = hass.data.get(NEATO_LOGIN) for robot in hass.data[NEATO_ROBOTS]: dev.append(NeatoSensor(neato, robot)) if not dev: return _LOGGER.debug("Adding robots for sensors %s", dev) async_add_entities(dev, True) class NeatoSensor(Entity): """Neato sensor.""" def __init__(self, neato, robot): """Initialize Neato sensor.""" self.robot = robot self._available = False self._robot_name = f"{self.robot.name} {BATTERY}" self._robot_serial = self.robot.serial self._state = None def update(self): """Update Neato Sensor.""" try: self._state = self.robot.state except NeatoRobotException as ex: if self._available: _LOGGER.error( "Neato sensor connection error for '%s': %s", self.entity_id, ex ) self._state = None self._available = False return self._available = True _LOGGER.debug("self._state=%s", self._state) @property def name(self): """Return the name of this sensor.""" return self._robot_name @property def unique_id(self): """Return unique ID.""" return self._robot_serial @property def device_class(self): """Return the device class.""" return DEVICE_CLASS_BATTERY @property def available(self): """Return availability.""" return self._available @property def state(self): """Return the state.""" return self._state["details"]["charge"] @property def unit_of_measurement(self): """Return unit of measurement.""" return PERCENTAGE @property def device_info(self): """Device info for neato robot.""" return {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}}
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/neato/sensor.py
"""Support for Synology DSM binary sensors.""" from typing import Dict from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS from homeassistant.helpers.typing import HomeAssistantType from . import SynologyDSMDeviceEntity, SynologyDSMDispatcherEntity from .const import ( DOMAIN, SECURITY_BINARY_SENSORS, STORAGE_DISK_BINARY_SENSORS, SYNO_API, UPGRADE_BINARY_SENSORS, ) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the Synology NAS binary sensor.""" api = hass.data[DOMAIN][entry.unique_id][SYNO_API] entities = [ SynoDSMSecurityBinarySensor( api, sensor_type, SECURITY_BINARY_SENSORS[sensor_type] ) for sensor_type in SECURITY_BINARY_SENSORS ] entities += [ SynoDSMUpgradeBinarySensor( api, sensor_type, UPGRADE_BINARY_SENSORS[sensor_type] ) for sensor_type in UPGRADE_BINARY_SENSORS ] # Handle all disks if api.storage.disks_ids: for disk in entry.data.get(CONF_DISKS, api.storage.disks_ids): entities += [ SynoDSMStorageBinarySensor( api, sensor_type, STORAGE_DISK_BINARY_SENSORS[sensor_type], disk ) for sensor_type in STORAGE_DISK_BINARY_SENSORS ] async_add_entities(entities) class SynoDSMSecurityBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Security binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.security, self.entity_type) != "safe" @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.security) @property def device_state_attributes(self) -> Dict[str, str]: """Return security checks details.""" return self._api.security.status_by_check class SynoDSMStorageBinarySensor(SynologyDSMDeviceEntity, BinarySensorEntity): """Representation a Synology Storage binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.storage, self.entity_type)(self._device_id) class SynoDSMUpgradeBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Upgrade binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.upgrade, self.entity_type) @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.upgrade)
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/synology_dsm/binary_sensor.py
"""Support for Powerview scenes from a Powerview hub.""" from typing import Any from aiopvapi.resources.scene import Scene as PvScene import voluptuous as vol from homeassistant.components.scene import Scene from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_PLATFORM import homeassistant.helpers.config_validation as cv from .const import ( COORDINATOR, DEVICE_INFO, DOMAIN, HUB_ADDRESS, PV_API, PV_ROOM_DATA, PV_SCENE_DATA, ROOM_NAME_UNICODE, STATE_ATTRIBUTE_ROOM_NAME, ) from .entity import HDEntity PLATFORM_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): DOMAIN, vol.Required(HUB_ADDRESS): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Import platform from yaml.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: config[HUB_ADDRESS]}, ) ) async def async_setup_entry(hass, entry, async_add_entities): """Set up powerview scene entries.""" pv_data = hass.data[DOMAIN][entry.entry_id] room_data = pv_data[PV_ROOM_DATA] scene_data = pv_data[PV_SCENE_DATA] pv_request = pv_data[PV_API] coordinator = pv_data[COORDINATOR] device_info = pv_data[DEVICE_INFO] pvscenes = ( PowerViewScene( PvScene(raw_scene, pv_request), room_data, coordinator, device_info ) for scene_id, raw_scene in scene_data.items() ) async_add_entities(pvscenes) class PowerViewScene(HDEntity, Scene): """Representation of a Powerview scene.""" def __init__(self, scene, room_data, coordinator, device_info): """Initialize the scene.""" super().__init__(coordinator, device_info, scene.id) self._scene = scene self._room_name = room_data.get(scene.room_id, {}).get(ROOM_NAME_UNICODE, "") @property def name(self): """Return the name of the scene.""" return self._scene.name @property def device_state_attributes(self): """Return the state attributes.""" return {STATE_ATTRIBUTE_ROOM_NAME: self._room_name} @property def icon(self): """Icon to use in the frontend.""" return "mdi:blinds" async def async_activate(self, **kwargs: Any) -> None: """Activate scene. Try to get entities into requested state.""" await self._scene.activate()
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/hunterdouglas_powerview/scene.py
"""The Global Disaster Alert and Coordination System (GDACS) integration.""" import asyncio from datetime import timedelta import logging from aio_georss_gdacs import GdacsFeedManager import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_SCAN_INTERVAL, CONF_UNIT_SYSTEM_IMPERIAL, LENGTH_MILES, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.unit_system import METRIC_SYSTEM from .const import ( CONF_CATEGORIES, DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN, FEED, PLATFORMS, VALID_CATEGORIES, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float), vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.time_period, vol.Optional(CONF_CATEGORIES, default=[]): vol.All( cv.ensure_list, [vol.In(VALID_CATEGORIES)] ), } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the GDACS component.""" if DOMAIN not in config: return True conf = config[DOMAIN] latitude = conf.get(CONF_LATITUDE, hass.config.latitude) longitude = conf.get(CONF_LONGITUDE, hass.config.longitude) scan_interval = conf[CONF_SCAN_INTERVAL] categories = conf[CONF_CATEGORIES] hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={ CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude, CONF_RADIUS: conf[CONF_RADIUS], CONF_SCAN_INTERVAL: scan_interval, CONF_CATEGORIES: categories, }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up the GDACS component as config entry.""" hass.data.setdefault(DOMAIN, {}) feeds = hass.data[DOMAIN].setdefault(FEED, {}) radius = config_entry.data[CONF_RADIUS] if hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL: radius = METRIC_SYSTEM.length(radius, LENGTH_MILES) # Create feed entity manager for all platforms. 
manager = GdacsFeedEntityManager(hass, config_entry, radius) feeds[config_entry.entry_id] = manager _LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id) await manager.async_init() return True async def async_unload_entry(hass, config_entry): """Unload an GDACS component config entry.""" manager = hass.data[DOMAIN][FEED].pop(config_entry.entry_id) await manager.async_stop() await asyncio.wait( [ hass.config_entries.async_forward_entry_unload(config_entry, domain) for domain in PLATFORMS ] ) return True class GdacsFeedEntityManager: """Feed Entity Manager for GDACS feed.""" def __init__(self, hass, config_entry, radius_in_km): """Initialize the Feed Entity Manager.""" self._hass = hass self._config_entry = config_entry coordinates = ( config_entry.data[CONF_LATITUDE], config_entry.data[CONF_LONGITUDE], ) categories = config_entry.data[CONF_CATEGORIES] websession = aiohttp_client.async_get_clientsession(hass) self._feed_manager = GdacsFeedManager( websession, self._generate_entity, self._update_entity, self._remove_entity, coordinates, filter_radius=radius_in_km, filter_categories=categories, status_async_callback=self._status_update, ) self._config_entry_id = config_entry.entry_id self._scan_interval = timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL]) self._track_time_remove_callback = None self._status_info = None self.listeners = [] async def async_init(self): """Schedule initial and regular updates based on configured time interval.""" for domain in PLATFORMS: self._hass.async_create_task( self._hass.config_entries.async_forward_entry_setup( self._config_entry, domain ) ) async def update(event_time): """Update.""" await self.async_update() # Trigger updates at regular intervals. self._track_time_remove_callback = async_track_time_interval( self._hass, update, self._scan_interval ) _LOGGER.debug("Feed entity manager initialized") async def async_update(self): """Refresh data.""" await self._feed_manager.update() _LOGGER.debug("Feed entity manager updated") async def async_stop(self): """Stop this feed entity manager from refreshing.""" for unsub_dispatcher in self.listeners: unsub_dispatcher() self.listeners = [] if self._track_time_remove_callback: self._track_time_remove_callback() _LOGGER.debug("Feed entity manager stopped") @callback def async_event_new_entity(self): """Return manager specific event to signal new entity.""" return f"gdacs_new_geolocation_{self._config_entry_id}" def get_entry(self, external_id): """Get feed entry by external id.""" return self._feed_manager.feed_entries.get(external_id) def status_info(self): """Return latest status update info received.""" return self._status_info async def _generate_entity(self, external_id): """Generate new entity.""" async_dispatcher_send( self._hass, self.async_event_new_entity(), self, self._config_entry.unique_id, external_id, ) async def _update_entity(self, external_id): """Update entity.""" async_dispatcher_send(self._hass, f"gdacs_update_{external_id}") async def _remove_entity(self, external_id): """Remove entity.""" async_dispatcher_send(self._hass, f"gdacs_delete_{external_id}") async def _status_update(self, status_info): """Propagate status update.""" _LOGGER.debug("Status update received: %s", status_info) self._status_info = status_info async_dispatcher_send(self._hass, f"gdacs_status_{self._config_entry_id}")
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
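Every test in the module above follows the same skeleton: freeze the clock, set up the jewish_calendar integration, fire a time-changed event so the sensors refresh, then assert on the resulting state. Below is a condensed sketch of that skeleton; it assumes the Home Assistant pytest harness (the hass fixture and tests.common.async_fire_time_changed) plus the alter_time helper imported at the top of this file, and assert_sensor_state is a hypothetical wrapper for illustration, not part of the real test suite.

```python
# Condensed sketch of the shared test skeleton used throughout this module.
# assert_sensor_state is a hypothetical helper; alter_time is the context
# manager imported from this test package at the top of the file.
from datetime import timedelta

from homeassistant.components import jewish_calendar
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from . import alter_time
from tests.common import async_fire_time_changed


async def assert_sensor_state(hass, test_time, entity_id, expected, config=None):
    """Freeze time, set up the integration, tick the clock, and check one sensor."""
    with alter_time(test_time):
        assert await async_setup_component(
            hass,
            jewish_calendar.DOMAIN,
            {"jewish_calendar": config or {"name": "test"}},
        )
        await hass.async_block_till_done()

        # The sensors refresh on the next time tick, so fire one 30 seconds ahead.
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30))
        await hass.async_block_till_done()

        assert hass.states.get(entity_id).state == expected
```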
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/gdacs/__init__.py
"""Support for Nexia / Trane XL Thermostats.""" from homeassistant.components.binary_sensor import BinarySensorEntity from .const import DOMAIN, NEXIA_DEVICE, UPDATE_COORDINATOR from .entity import NexiaThermostatEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up sensors for a Nexia device.""" nexia_data = hass.data[DOMAIN][config_entry.entry_id] nexia_home = nexia_data[NEXIA_DEVICE] coordinator = nexia_data[UPDATE_COORDINATOR] entities = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat = nexia_home.get_thermostat_by_id(thermostat_id) entities.append( NexiaBinarySensor( coordinator, thermostat, "is_blower_active", "Blower Active" ) ) if thermostat.has_emergency_heat(): entities.append( NexiaBinarySensor( coordinator, thermostat, "is_emergency_heat_active", "Emergency Heat Active", ) ) async_add_entities(entities, True) class NexiaBinarySensor(NexiaThermostatEntity, BinarySensorEntity): """Provices Nexia BinarySensor support.""" def __init__(self, coordinator, thermostat, sensor_call, sensor_name): """Initialize the nexia sensor.""" super().__init__( coordinator, thermostat, name=f"{thermostat.get_name()} {sensor_name}", unique_id=f"{thermostat.thermostat_id}_{sensor_call}", ) self._call = sensor_call self._state = None @property def is_on(self): """Return the status of the sensor.""" return getattr(self._thermostat, self._call)()
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/nexia/binary_sensor.py
"""Support for Twilio.""" from twilio.rest import Client from twilio.twiml import TwiML import voluptuous as vol from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_flow import homeassistant.helpers.config_validation as cv from .const import DOMAIN CONF_ACCOUNT_SID = "account_sid" CONF_AUTH_TOKEN = "auth_token" DATA_TWILIO = DOMAIN RECEIVED_DATA = f"{DOMAIN}_data_received" CONFIG_SCHEMA = vol.Schema( { vol.Optional(DOMAIN): vol.Schema( { vol.Required(CONF_ACCOUNT_SID): cv.string, vol.Required(CONF_AUTH_TOKEN): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Twilio component.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_TWILIO] = Client( conf.get(CONF_ACCOUNT_SID), conf.get(CONF_AUTH_TOKEN) ) return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Twilio for inbound messages and calls.""" data = dict(await request.post()) data["webhook_id"] = webhook_id hass.bus.async_fire(RECEIVED_DATA, dict(data)) return TwiML().to_xml() async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, "Twilio", entry.data[CONF_WEBHOOK_ID], handle_webhook ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) return True async_remove_entry = config_entry_flow.webhook_async_remove_entry
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/twilio/__init__.py
"""Support for Lupusec Security System switches.""" from datetime import timedelta import lupupy.constants as CONST from homeassistant.components.switch import SwitchEntity from . import DOMAIN as LUPUSEC_DOMAIN, LupusecDevice SCAN_INTERVAL = timedelta(seconds=2) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Lupusec switch devices.""" if discovery_info is None: return data = hass.data[LUPUSEC_DOMAIN] devices = [] for device in data.lupusec.get_devices(generic_type=CONST.TYPE_SWITCH): devices.append(LupusecSwitch(data, device)) add_entities(devices) class LupusecSwitch(LupusecDevice, SwitchEntity): """Representation of a Lupusec switch.""" def turn_on(self, **kwargs): """Turn on the device.""" self._device.switch_on() def turn_off(self, **kwargs): """Turn off the device.""" self._device.switch_off() @property def is_on(self): """Return true if device is on.""" return self._device.is_on
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/lupusec/switch.py
"""Reproduce an input boolean state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN _LOGGER = logging.getLogger(__name__) async def _async_reproduce_states( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce input boolean states.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in (STATE_ON, STATE_OFF): _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return if cur_state.state == state.state: return service = SERVICE_TURN_ON if state.state == STATE_ON else SERVICE_TURN_OFF await hass.services.async_call( DOMAIN, service, {ATTR_ENTITY_ID: state.entity_id}, context=context, blocking=True, ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce component states.""" await asyncio.gather( *( _async_reproduce_states( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/input_boolean/reproduce_state.py
"""The ATAG Integration.""" from datetime import timedelta import logging import async_timeout from pyatag import AtagException, AtagOne from homeassistant.components.climate import DOMAIN as CLIMATE from homeassistant.components.sensor import DOMAIN as SENSOR from homeassistant.components.water_heater import DOMAIN as WATER_HEATER from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, asyncio from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) _LOGGER = logging.getLogger(__name__) DOMAIN = "atag" PLATFORMS = [CLIMATE, WATER_HEATER, SENSOR] async def async_setup(hass: HomeAssistant, config): """Set up the Atag component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Atag integration from a config entry.""" session = async_get_clientsession(hass) coordinator = AtagDataUpdateCoordinator(hass, session, entry) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=coordinator.atag.id) for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True class AtagDataUpdateCoordinator(DataUpdateCoordinator): """Define an object to hold Atag data.""" def __init__(self, hass, session, entry): """Initialize.""" self.atag = AtagOne(session=session, **entry.data) super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(seconds=30) ) async def _async_update_data(self): """Update data via library.""" with async_timeout.timeout(20): try: if not await self.atag.update(): raise UpdateFailed("No data received") except AtagException as error: raise UpdateFailed(error) from error return self.atag.report async def async_unload_entry(hass, entry): """Unload Atag config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class AtagEntity(CoordinatorEntity): """Defines a base Atag entity.""" def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: """Initialize the Atag entity.""" super().__init__(coordinator) self._id = atag_id self._name = DOMAIN.title() @property def device_info(self) -> dict: """Return info for device registry.""" device = self.coordinator.atag.id version = self.coordinator.atag.apiversion return { "identifiers": {(DOMAIN, device)}, "name": "Atag Thermostat", "model": "Atag One", "sw_version": version, "manufacturer": "Atag", } @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def unique_id(self): """Return a unique ID to use for this entity.""" return f"{self.coordinator.atag.id}-{self._id}"
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/atag/__init__.py
"""The Dune HD component.""" import asyncio from pdunehd import DuneHDPlayer from homeassistant.const import CONF_HOST from .const import DOMAIN PLATFORMS = ["media_player"] async def async_setup(hass, config): """Set up the Dune HD component.""" return True async def async_setup_entry(hass, config_entry): """Set up a config entry.""" host = config_entry.data[CONF_HOST] player = DuneHDPlayer(host) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = player for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, component) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/dunehd/__init__.py
"""Provides device trigger for lights.""" from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import toggle_entity from homeassistant.const import CONF_DOMAIN from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend( {vol.Required(CONF_DOMAIN): DOMAIN} ) async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" return await toggle_entity.async_attach_trigger( hass, config, action, automation_info ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers.""" return await toggle_entity.async_get_triggers(hass, device_id, DOMAIN) async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict: """List trigger capabilities.""" return await toggle_entity.async_get_trigger_capabilities(hass, config)
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/light/device_trigger.py
"""Support for switch controlled using a telnet connection.""" from datetime import timedelta import logging import telnetlib import voluptuous as vol from homeassistant.components.switch import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, SwitchEntity, ) from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_COMMAND_STATE, CONF_NAME, CONF_PORT, CONF_RESOURCE, CONF_SWITCHES, CONF_TIMEOUT, CONF_VALUE_TEMPLATE, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_PORT = 23 DEFAULT_TIMEOUT = 0.2 SWITCH_SCHEMA = vol.Schema( { vol.Required(CONF_COMMAND_OFF): cv.string, vol.Required(CONF_COMMAND_ON): cv.string, vol.Required(CONF_RESOURCE): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_COMMAND_STATE): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(float), } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} ) SCAN_INTERVAL = timedelta(seconds=10) def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return switches controlled by telnet commands.""" devices = config.get(CONF_SWITCHES, {}) switches = [] for object_id, device_config in devices.items(): value_template = device_config.get(CONF_VALUE_TEMPLATE) if value_template is not None: value_template.hass = hass switches.append( TelnetSwitch( hass, object_id, device_config.get(CONF_RESOURCE), device_config.get(CONF_PORT), device_config.get(CONF_NAME, object_id), device_config.get(CONF_COMMAND_ON), device_config.get(CONF_COMMAND_OFF), device_config.get(CONF_COMMAND_STATE), value_template, device_config.get(CONF_TIMEOUT), ) ) if not switches: _LOGGER.error("No switches added") return add_entities(switches) class TelnetSwitch(SwitchEntity): """Representation of a switch that can be toggled using telnet commands.""" def __init__( self, hass, object_id, resource, port, friendly_name, command_on, command_off, command_state, value_template, timeout, ): """Initialize the switch.""" self._hass = hass self.entity_id = ENTITY_ID_FORMAT.format(object_id) self._resource = resource self._port = port self._name = friendly_name self._state = False self._command_on = command_on self._command_off = command_off self._command_state = command_state self._value_template = value_template self._timeout = timeout def _telnet_command(self, command): try: telnet = telnetlib.Telnet(self._resource, self._port) telnet.write(command.encode("ASCII") + b"\r") response = telnet.read_until(b"\r", timeout=self._timeout) _LOGGER.debug("telnet response: %s", response.decode("ASCII").strip()) return response.decode("ASCII").strip() except OSError as error: _LOGGER.error( 'Command "%s" failed with exception: %s', command, repr(error) ) return None @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """Only poll if we have state command.""" return self._command_state is not None @property def is_on(self): """Return true if device is on.""" return self._state @property def assumed_state(self): """Return true if no state command is defined, false otherwise.""" return self._command_state is None def update(self): """Update device state.""" response = self._telnet_command(self._command_state) if response: rendered = self._value_template.render_with_possible_json_value(response) self._state = rendered == "True" else: 
_LOGGER.warning("Empty response for command: %s", self._command_state) def turn_on(self, **kwargs): """Turn the device on.""" self._telnet_command(self._command_on) if self.assumed_state: self._state = True def turn_off(self, **kwargs): """Turn the device off.""" self._telnet_command(self._command_off) if self.assumed_state: self._state = False
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/telnet/switch.py
"""Support for Google Nest SDM Cameras.""" import datetime import logging from typing import Optional from google_nest_sdm.camera_traits import ( CameraEventImageTrait, CameraImageTrait, CameraLiveStreamTrait, ) from google_nest_sdm.device import Device from google_nest_sdm.exceptions import GoogleNestException from haffmpeg.tools import IMAGE_JPEG from homeassistant.components.camera import SUPPORT_STREAM, Camera from homeassistant.components.ffmpeg import async_get_image from homeassistant.config_entries import ConfigEntry from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util.dt import utcnow from .const import DATA_SUBSCRIBER, DOMAIN from .device_info import DeviceInfo _LOGGER = logging.getLogger(__name__) # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) async def async_setup_sdm_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the cameras.""" subscriber = hass.data[DOMAIN][DATA_SUBSCRIBER] try: device_manager = await subscriber.async_get_device_manager() except GoogleNestException as err: raise PlatformNotReady from err # Fetch initial data so we have data when entities subscribe. entities = [] for device in device_manager.devices.values(): if ( CameraImageTrait.NAME in device.traits or CameraLiveStreamTrait.NAME in device.traits ): entities.append(NestCamera(device)) async_add_entities(entities) class NestCamera(Camera): """Devices that support cameras.""" def __init__(self, device: Device): """Initialize the camera.""" super().__init__() self._device = device self._device_info = DeviceInfo(device) self._stream = None self._stream_refresh_unsub = None # Cache of most recent event image self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None @property def should_poll(self) -> bool: """Disable polling since entities have state pushed via pubsub.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" # The API "name" field is a unique device identifier. 
return f"{self._device.name}-camera" @property def name(self): """Return the name of the camera.""" return self._device_info.device_name @property def device_info(self): """Return device specific attributes.""" return self._device_info.device_info @property def brand(self): """Return the camera brand.""" return self._device_info.device_brand @property def model(self): """Return the camera model.""" return self._device_info.device_model @property def supported_features(self): """Flag supported features.""" supported_features = 0 if CameraLiveStreamTrait.NAME in self._device.traits: supported_features |= SUPPORT_STREAM return supported_features async def stream_source(self): """Return the source of the stream.""" if CameraLiveStreamTrait.NAME not in self._device.traits: return None trait = self._device.traits[CameraLiveStreamTrait.NAME] if not self._stream: _LOGGER.debug("Fetching stream url") self._stream = await trait.generate_rtsp_stream() self._schedule_stream_refresh() if self._stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") return self._stream.rtsp_stream_url def _schedule_stream_refresh(self): """Schedules an alarm to refresh the stream url before expiration.""" _LOGGER.debug("New stream url expires at %s", self._stream.expires_at) refresh_time = self._stream.expires_at - STREAM_EXPIRATION_BUFFER # Schedule an alarm to extend the stream if self._stream_refresh_unsub is not None: self._stream_refresh_unsub() self._stream_refresh_unsub = async_track_point_in_utc_time( self.hass, self._handle_stream_refresh, refresh_time, ) async def _handle_stream_refresh(self, now): """Alarm that fires to check if the stream should be refreshed.""" if not self._stream: return _LOGGER.debug("Extending stream url") try: self._stream = await self._stream.extend_rtsp_stream() except GoogleNestException as err: _LOGGER.debug("Failed to extend stream: %s", err) # Next attempt to catch a url will get a new one self._stream = None return # Update the stream worker with the latest valid url if self.stream: self.stream.update_source(self._stream.rtsp_stream_url) self._schedule_stream_refresh() async def async_will_remove_from_hass(self): """Invalidates the RTSP token when unloaded.""" if self._stream: _LOGGER.debug("Invalidating stream") await self._stream.stop_rtsp_stream() if self._stream_refresh_unsub: self._stream_refresh_unsub() self._event_id = None self._event_image_bytes = None if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() async def async_added_to_hass(self): """Run when entity is added to register update signal handler.""" self.async_on_remove( self._device.add_update_listener(self.async_write_ha_state) ) async def async_camera_image(self): """Return bytes of camera image.""" # Returns the snapshot of the last event for ~30 seconds after the event active_event_image = await self._async_active_event_image() if active_event_image: return active_event_image # Fetch still image from the live stream stream_url = await self.stream_source() if not stream_url: return None return await async_get_image(self.hass, stream_url, output_format=IMAGE_JPEG) async def _async_active_event_image(self): """Return image from any active events happening.""" if CameraEventImageTrait.NAME not in self._device.traits: return None trait = self._device.active_event_trait if not trait: return None # Reuse image bytes if they have already been fetched event = trait.last_event if self._event_id is not None and self._event_id == event.event_id: return self._event_image_bytes 
_LOGGER.debug("Generating event image URL for event_id %s", event.event_id) image_bytes = await self._async_fetch_active_event_image(trait) if image_bytes is None: return None self._event_id = event.event_id self._event_image_bytes = image_bytes self._schedule_event_image_cleanup(event.expires_at) return image_bytes async def _async_fetch_active_event_image(self, trait): """Return image bytes for an active event.""" try: event_image = await trait.generate_active_event_image() except GoogleNestException as err: _LOGGER.debug("Unable to generate event image URL: %s", err) return None if not event_image: return None try: return await event_image.contents() except GoogleNestException as err: _LOGGER.debug("Unable to fetch event image: %s", err) return None def _schedule_event_image_cleanup(self, point_in_time): """Schedules an alarm to remove the image bytes from memory, honoring expiration.""" if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() self._event_image_cleanup_unsub = async_track_point_in_utc_time( self.hass, self._handle_event_image_cleanup, point_in_time, ) def _handle_event_image_cleanup(self, now): """Clear images cached from events and scheduled callback.""" self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/nest/camera_sdm.py
"""Support for testing internet speed via Speedtest.net.""" from datetime import timedelta import logging import speedtest import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_STARTED, ) from homeassistant.core import CoreState, callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_MANUAL, CONF_SERVER_ID, DEFAULT_SCAN_INTERVAL, DEFAULT_SERVER, DOMAIN, SENSOR_TYPES, SPEED_TEST_SERVICE, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_SERVER_ID): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL) ): cv.positive_time_period, vol.Optional(CONF_MANUAL, default=False): cv.boolean, vol.Optional( CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES) ): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]), } ) }, extra=vol.ALLOW_EXTRA, ) def server_id_valid(server_id): """Check if server_id is valid.""" try: api = speedtest.Speedtest() api.get_servers([int(server_id)]) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers): return False return True async def async_setup(hass, config): """Import integration from config.""" if DOMAIN in config: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] ) ) return True async def async_setup_entry(hass, config_entry): """Set up the Speedtest.net component.""" coordinator = SpeedTestDataCoordinator(hass, config_entry) await coordinator.async_setup() async def _enable_scheduled_speedtests(*_): """Activate the data update coordinator.""" coordinator.update_interval = timedelta( minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) ) await coordinator.async_refresh() if not config_entry.options[CONF_MANUAL]: if hass.state == CoreState.running: await _enable_scheduled_speedtests() if not coordinator.last_update_success: raise ConfigEntryNotReady else: # Running a speed test during startup can prevent # integrations from being able to setup because it # can saturate the network interface. 
hass.bus.async_listen_once( EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests ) hass.data[DOMAIN] = coordinator hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, "sensor") ) return True async def async_unload_entry(hass, config_entry): """Unload SpeedTest Entry from config_entry.""" hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE) hass.data[DOMAIN].async_unload() await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") hass.data.pop(DOMAIN) return True class SpeedTestDataCoordinator(DataUpdateCoordinator): """Get the latest data from speedtest.net.""" def __init__(self, hass, config_entry): """Initialize the data object.""" self.hass = hass self.config_entry = config_entry self.api = None self.servers = {} self._unsub_update_listener = None super().__init__( self.hass, _LOGGER, name=DOMAIN, update_method=self.async_update, ) def update_servers(self): """Update list of test servers.""" try: server_list = self.api.get_servers() except speedtest.ConfigRetrievalError: _LOGGER.debug("Error retrieving server list") return self.servers[DEFAULT_SERVER] = {} for server in sorted( server_list.values(), key=lambda server: server[0]["country"] + server[0]["sponsor"], ): self.servers[ f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}" ] = server[0] def update_data(self): """Get the latest data from speedtest.net.""" self.update_servers() self.api.closest.clear() if self.config_entry.options.get(CONF_SERVER_ID): server_id = self.config_entry.options.get(CONF_SERVER_ID) self.api.get_servers(servers=[server_id]) self.api.get_best_server() _LOGGER.debug( "Executing speedtest.net speed test with server_id: %s", self.api.best["id"] ) self.api.download() self.api.upload() return self.api.results.dict() async def async_update(self, *_): """Update Speedtest data.""" try: return await self.hass.async_add_executor_job(self.update_data) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err: raise UpdateFailed from err async def async_set_options(self): """Set options for entry.""" if not self.config_entry.options: data = {**self.config_entry.data} options = { CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL), CONF_MANUAL: data.pop(CONF_MANUAL, False), CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")), } self.hass.config_entries.async_update_entry( self.config_entry, data=data, options=options ) async def async_setup(self): """Set up SpeedTest.""" try: self.api = await self.hass.async_add_executor_job(speedtest.Speedtest) except speedtest.ConfigRetrievalError as err: raise ConfigEntryNotReady from err async def request_update(call): """Request update.""" await self.async_request_refresh() await self.async_set_options() await self.hass.async_add_executor_job(self.update_servers) self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update) self._unsub_update_listener = self.config_entry.add_update_listener( options_updated_listener ) @callback def async_unload(self): """Unload the coordinator.""" if not self._unsub_update_listener: return self._unsub_update_listener() self._unsub_update_listener = None async def options_updated_listener(hass, entry): """Handle options update.""" if entry.options[CONF_MANUAL]: hass.data[DOMAIN].update_interval = None return hass.data[DOMAIN].update_interval = timedelta( minutes=entry.options[CONF_SCAN_INTERVAL] ) await hass.data[DOMAIN].async_request_refresh()
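For context, a minimal sketch (illustrative only, not the integration's real sensor platform) of how a sensor entity would typically consume the SpeedTestDataCoordinator set up above via Home Assistant's CoordinatorEntity helper; the class name and the result keys such as "download" and "ping" are assumptions here, not taken from the source.

# Illustrative sketch only; not the speedtestdotnet integration's actual sensor code.
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExampleSpeedtestSensor(CoordinatorEntity):
    """Hypothetical sensor exposing one value from coordinator.data."""

    def __init__(self, coordinator, key):
        super().__init__(coordinator)
        self._key = key  # e.g. "download", "upload" or "ping" (assumed keys)

    @property
    def name(self):
        return f"SpeedTest {self._key}"

    @property
    def state(self):
        # coordinator.data holds the dict returned by api.results.dict()
        # in SpeedTestDataCoordinator.update_data() above.
        if not self.coordinator.data:
            return None
        return self.coordinator.data.get(self._key)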
"""The tests for the Jewish calendar sensors.""" from datetime import datetime as dt, timedelta import pytest from homeassistant.components import jewish_calendar from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( HDATE_DEFAULT_ALTITUDE, alter_time, make_jerusalem_test_params, make_nyc_test_params, ) from tests.common import async_fire_time_changed async def test_jewish_calendar_min_config(hass): """Test minimum jewish calendar configuration.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None async def test_jewish_calendar_hebrew(hass): """Test jewish calendar sensor with language set to hebrew.""" assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"language": "hebrew"}} ) await hass.async_block_till_done() assert hass.states.get("sensor.jewish_calendar_date") is not None TEST_PARAMS = [ (dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"), ( dt(2018, 9, 3), "UTC", 31.778, 35.235, "hebrew", "date", False, 'כ"ג אלול ה\' תשע"ח', ), (dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"), ( dt(2018, 9, 10), "UTC", 31.778, 35.235, "english", "holiday", False, "Rosh Hashana I", ), ( dt(2018, 9, 8), "UTC", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "נצבים", ), ( dt(2018, 9, 8), "America/New_York", 40.7128, -74.0060, "hebrew", "t_set_hakochavim", True, dt(2018, 9, 8, 19, 48), ), ( dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "t_set_hakochavim", False, dt(2018, 9, 8, 19, 21), ), ( dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "parshat_hashavua", False, "לך לך", ), ( dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ה' מרחשוון ה' תשע\"ט", ), ( dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235, "hebrew", "date", False, "ו' מרחשוון ה' תשע\"ט", ), ] TEST_IDS = [ "date_output", "date_output_hebrew", "holiday", "holiday_english", "torah_reading", "first_stars_ny", "first_stars_jerusalem", "torah_reading_weekday", "date_before_sunset", "date_after_sunset", ] @pytest.mark.parametrize( [ "now", "tzname", "latitude", "longitude", "language", "sensor", "diaspora", "result", ], TEST_PARAMS, ids=TEST_IDS, ) async def test_jewish_calendar_sensor( hass, legacy_patchable_time, now, tzname, latitude, longitude, language, sensor, diaspora, result, ): """Test Jewish calendar sensor output.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() result = ( dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result ) sensor_object = hass.states.get(f"sensor.test_{sensor}") assert sensor_object.state == str(result) if sensor == "holiday": assert sensor_object.attributes.get("id") == "rosh_hashana_i" assert sensor_object.attributes.get("type") == "YOM_TOV" assert sensor_object.attributes.get("type_id") == 1 SHABBAT_PARAMS = [ make_nyc_test_params( dt(2018, 9, 
1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 16, 0), { "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 22), "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, havdalah_offset=50, ), make_nyc_test_params( dt(2018, 9, 1, 20, 0), { "english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14), "english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15), "english_upcoming_havdalah": dt(2018, 9, 1, 20, 14), "english_parshat_hashavua": "Ki Tavo", "hebrew_parshat_hashavua": "כי תבוא", }, ), make_nyc_test_params( dt(2018, 9, 1, 20, 21), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 7, 13, 1), { "english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_havdalah": dt(2018, 9, 8, 20, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4), "english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2), "english_parshat_hashavua": "Nitzavim", "hebrew_parshat_hashavua": "נצבים", }, ), make_nyc_test_params( dt(2018, 9, 8, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Erev Rosh Hashana", "hebrew_holiday": "ערב ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 9, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 10, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1), "english_upcoming_havdalah": dt(2018, 9, 11, 19, 57), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52), "english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50), "english_parshat_hashavua": "Vayeilech", "hebrew_parshat_hashavua": "וילך", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_nyc_test_params( dt(2018, 9, 28, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_havdalah": dt(2018, 9, 29, 19, 25), "english_upcoming_shabbat_candle_lighting": dt(2018, 9, 28, 18, 28), "english_upcoming_shabbat_havdalah": dt(2018, 9, 29, 19, 25), "english_parshat_hashavua": "none", "hebrew_parshat_hashavua": "none", }, ), 
make_nyc_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_nyc_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_nyc_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 25), "english_upcoming_havdalah": dt(2018, 10, 2, 19, 20), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 17), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 19, 13), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Simchat Torah", "hebrew_holiday": "שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Hoshana Raba", "hebrew_holiday": "הושענא רבה", }, ), make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 10), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 2), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", "english_holiday": "Shmini Atzeret", "hebrew_holiday": "שמיני עצרת", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 56), "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 3), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 56), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", }, ), make_nyc_test_params( dt(2016, 6, 11, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_shabbat_havdalah": "unknown", "english_parshat_hashavua": "Bamidbar", "hebrew_parshat_hashavua": "במדבר", "english_holiday": "Erev Shavuot", "hebrew_holiday": "ערב שבועות", }, ), make_nyc_test_params( dt(2016, 6, 12, 8, 25), { "english_upcoming_candle_lighting": dt(2016, 6, 10, 20, 7), "english_upcoming_havdalah": dt(2016, 6, 13, 21, 17), "english_upcoming_shabbat_candle_lighting": dt(2016, 6, 17, 20, 10), "english_upcoming_shabbat_havdalah": dt(2016, 6, 18, 21, 19), "english_parshat_hashavua": "Nasso", "hebrew_parshat_hashavua": "נשא", "english_holiday": "Shavuot", "hebrew_holiday": "שבועות", }, ), make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { 
"english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana I", "hebrew_holiday": "א' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "Rosh Hashana II", "hebrew_holiday": "ב' ראש השנה", }, ), make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 23), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 13), "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 14), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 13), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", "english_holiday": "", "hebrew_holiday": "", }, ), ] SHABBAT_TEST_IDS = [ "currently_first_shabbat", "currently_first_shabbat_with_havdalah_offset", "currently_first_shabbat_bein_hashmashot_lagging_date", "after_first_shabbat", "friday_upcoming_shabbat", "upcoming_rosh_hashana", "currently_rosh_hashana", "second_day_rosh_hashana", "currently_shabbat_chol_hamoed", "upcoming_two_day_yomtov_in_diaspora", "currently_first_day_of_two_day_yomtov_in_diaspora", "currently_second_day_of_two_day_yomtov_in_diaspora", "upcoming_one_day_yom_tov_in_israel", "currently_one_day_yom_tov_in_israel", "after_one_day_yom_tov_in_israel", # Type 1 = Sat/Sun/Mon "currently_first_day_of_three_day_type1_yomtov_in_diaspora", "currently_second_day_of_three_day_type1_yomtov_in_diaspora", # Type 2 = Thurs/Fri/Sat "currently_first_day_of_three_day_type2_yomtov_in_israel", "currently_second_day_of_three_day_type2_yomtov_in_israel", "currently_third_day_of_three_day_type2_yomtov_in_israel", ] @pytest.mark.parametrize("language", ["english", "hebrew"]) @pytest.mark.parametrize( [ "now", "candle_lighting", "havdalah", "diaspora", "tzname", "latitude", "longitude", "result", ], SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = time_zone.localize(now) hass.config.time_zone = time_zone hass.config.latitude = latitude hass.config.longitude = longitude registry = await hass.helpers.entity_registry.async_get_registry() with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value 
= ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, time_zone, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid OMER_PARAMS = [ (dt(2019, 4, 21, 0), "1"), (dt(2019, 4, 21, 23), "2"), (dt(2019, 5, 23, 0), "33"), (dt(2019, 6, 8, 0), "49"), (dt(2019, 6, 9, 0), "0"), (dt(2019, 1, 1, 0), "0"), ] OMER_TEST_IDS = [ "first_day_of_omer", "first_day_of_omer_after_tzeit", "lag_baomer", "last_day_of_omer", "shavuot_no_omer", "jan_1st_no_omer", ] @pytest.mark.parametrize(["test_time", "result"], OMER_PARAMS, ids=OMER_TEST_IDS) async def test_omer_sensor(hass, legacy_patchable_time, test_time, result): """Test Omer Count sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_day_of_the_omer").state == result DAFYOMI_PARAMS = [ (dt(2014, 4, 28, 0), "Beitzah 29"), (dt(2020, 1, 4, 0), "Niddah 73"), (dt(2020, 1, 5, 0), "Berachos 2"), (dt(2020, 3, 7, 0), "Berachos 64"), (dt(2020, 3, 8, 0), "Shabbos 2"), ] DAFYOMI_TEST_IDS = [ "randomly_picked_date", "end_of_cycle13", "start_of_cycle14", "cycle14_end_of_berachos", "cycle14_start_of_shabbos", ] @pytest.mark.parametrize(["test_time", "result"], DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) async def test_dafyomi_sensor(hass, legacy_patchable_time, test_time, result): """Test Daf Yomi sensor output.""" test_time = hass.config.time_zone.localize(test_time) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, {"jewish_calendar": {"name": "test"}} ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get("sensor.test_daf_yomi").state == result
turbokongen/home-assistant
tests/components/jewish_calendar/test_sensor.py
homeassistant/components/speedtestdotnet/__init__.py
"""Support for the Italian train system using ViaggiaTreno API.""" import asyncio import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, HTTP_OK, TIME_MINUTES import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by ViaggiaTreno Data" VIAGGIATRENO_ENDPOINT = ( "http://www.viaggiatreno.it/viaggiatrenonew/" "resteasy/viaggiatreno/andamentoTreno/" "{station_id}/{train_id}" ) REQUEST_TIMEOUT = 5 # seconds ICON = "mdi:train" MONITORED_INFO = [ "categoria", "compOrarioArrivoZeroEffettivo", "compOrarioPartenzaZeroEffettivo", "destinazione", "numeroTreno", "orarioArrivo", "orarioPartenza", "origine", "subTitle", ] DEFAULT_NAME = "Train {}" CONF_NAME = "train_name" CONF_STATION_ID = "station_id" CONF_STATION_NAME = "station_name" CONF_TRAIN_ID = "train_id" ARRIVED_STRING = "Arrived" CANCELLED_STRING = "Cancelled" NOT_DEPARTED_STRING = "Not departed yet" NO_INFORMATION_STRING = "No information for this train now" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_TRAIN_ID): cv.string, vol.Required(CONF_STATION_ID): cv.string, vol.Optional(CONF_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the ViaggiaTreno platform.""" train_id = config.get(CONF_TRAIN_ID) station_id = config.get(CONF_STATION_ID) name = config.get(CONF_NAME) if not name: name = DEFAULT_NAME.format(train_id) async_add_entities([ViaggiaTrenoSensor(train_id, station_id, name)]) async def async_http_request(hass, uri): """Perform actual request.""" try: session = hass.helpers.aiohttp_client.async_get_clientsession(hass) with async_timeout.timeout(REQUEST_TIMEOUT): req = await session.get(uri) if req.status != HTTP_OK: return {"error": req.status} json_response = await req.json() return json_response except (asyncio.TimeoutError, aiohttp.ClientError) as exc: _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc) except ValueError: _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint") class ViaggiaTrenoSensor(Entity): """Implementation of a ViaggiaTreno sensor.""" def __init__(self, train_id, station_id, name): """Initialize the sensor.""" self._state = None self._attributes = {} self._unit = "" self._icon = ICON self._station_id = station_id self._name = name self.uri = VIAGGIATRENO_ENDPOINT.format( station_id=station_id, train_id=train_id ) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit @property def device_state_attributes(self): """Return extra attributes.""" self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION return self._attributes @staticmethod def has_departed(data): """Check if the train has actually departed.""" try: first_station = data["fermate"][0] if data["oraUltimoRilevamento"] or first_station["effettiva"]: return True except ValueError: _LOGGER.error("Cannot fetch first station: %s", data) return False @staticmethod def has_arrived(data): """Check if the train has already arrived.""" last_station = data["fermate"][-1] if not last_station["effettiva"]: return False return True 
@staticmethod def is_cancelled(data): """Check if the train is cancelled.""" if data["tipoTreno"] == "ST" and data["provvedimento"] == 1: return True return False async def async_update(self): """Update state.""" uri = self.uri res = await async_http_request(self.hass, uri) if res.get("error", ""): if res["error"] == 204: self._state = NO_INFORMATION_STRING self._unit = "" else: self._state = "Error: {}".format(res["error"]) self._unit = "" else: for i in MONITORED_INFO: self._attributes[i] = res[i] if self.is_cancelled(res): self._state = CANCELLED_STRING self._icon = "mdi:cancel" self._unit = "" elif not self.has_departed(res): self._state = NOT_DEPARTED_STRING self._unit = "" elif self.has_arrived(res): self._state = ARRIVED_STRING self._unit = "" else: self._state = res.get("ritardo") self._unit = TIME_MINUTES self._icon = ICON
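To make the state logic above concrete, here is a small illustrative snippet showing how the static helpers classify a ViaggiaTreno response; the payload values are hypothetical, only the field names ("fermate", "effettiva", "oraUltimoRilevamento", "tipoTreno", "provvedimento") come from the code above.

# Hypothetical payloads; field names taken from the helpers above.
departed = {
    "oraUltimoRilevamento": 1599550000000,
    "fermate": [{"effettiva": 1599549000000}],
}
cancelled = {"tipoTreno": "ST", "provvedimento": 1}

# Both helpers are @staticmethod, so they can be exercised on the class.
assert ViaggiaTrenoSensor.has_departed(departed)
assert ViaggiaTrenoSensor.is_cancelled(cancelled)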
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/viaggiatreno/sensor.py
"""Support for RFXtrx devices.""" import asyncio import binascii from collections import OrderedDict import copy import logging import RFXtrx as rfxtrxmod import async_timeout import voluptuous as vol from homeassistant import config_entries from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DEVICE, CONF_DEVICE_CLASS, CONF_DEVICE_ID, CONF_DEVICES, CONF_HOST, CONF_PORT, DEGREE, ELECTRICAL_CURRENT_AMPERE, ENERGY_KILO_WATT_HOUR, EVENT_HOMEASSISTANT_STOP, LENGTH_MILLIMETERS, PERCENTAGE, POWER_WATT, PRESSURE_HPA, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, SPEED_METERS_PER_SECOND, TEMP_CELSIUS, TIME_HOURS, UV_INDEX, VOLT, ) from homeassistant.core import callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity from .const import ( ATTR_EVENT, CONF_AUTOMATIC_ADD, CONF_DATA_BITS, CONF_DEBUG, CONF_FIRE_EVENT, CONF_OFF_DELAY, CONF_REMOVE_DEVICE, CONF_SIGNAL_REPETITIONS, DATA_CLEANUP_CALLBACKS, DATA_LISTENER, DATA_RFXOBJECT, DEVICE_PACKET_TYPE_LIGHTING4, EVENT_RFXTRX_EVENT, SERVICE_SEND, ) DOMAIN = "rfxtrx" DEFAULT_SIGNAL_REPETITIONS = 1 SIGNAL_EVENT = f"{DOMAIN}_event" DATA_TYPES = OrderedDict( [ ("Temperature", TEMP_CELSIUS), ("Temperature2", TEMP_CELSIUS), ("Humidity", PERCENTAGE), ("Barometer", PRESSURE_HPA), ("Wind direction", DEGREE), ("Rain rate", f"{LENGTH_MILLIMETERS}/{TIME_HOURS}"), ("Energy usage", POWER_WATT), ("Total usage", ENERGY_KILO_WATT_HOUR), ("Sound", None), ("Sensor Status", None), ("Counter value", "count"), ("UV", UV_INDEX), ("Humidity status", None), ("Forecast", None), ("Forecast numeric", None), ("Rain total", LENGTH_MILLIMETERS), ("Wind average speed", SPEED_METERS_PER_SECOND), ("Wind gust", SPEED_METERS_PER_SECOND), ("Chill", TEMP_CELSIUS), ("Count", "count"), ("Current Ch. 1", ELECTRICAL_CURRENT_AMPERE), ("Current Ch. 2", ELECTRICAL_CURRENT_AMPERE), ("Current Ch. 
3", ELECTRICAL_CURRENT_AMPERE), ("Voltage", VOLT), ("Current", ELECTRICAL_CURRENT_AMPERE), ("Battery numeric", PERCENTAGE), ("Rssi numeric", SIGNAL_STRENGTH_DECIBELS_MILLIWATT), ] ) _LOGGER = logging.getLogger(__name__) def _bytearray_string(data): val = cv.string(data) try: return bytearray.fromhex(val) except ValueError as err: raise vol.Invalid( "Data must be a hex string with multiple of two characters" ) from err def _ensure_device(value): if value is None: return DEVICE_DATA_SCHEMA({}) return DEVICE_DATA_SCHEMA(value) SERVICE_SEND_SCHEMA = vol.Schema({ATTR_EVENT: _bytearray_string}) DEVICE_DATA_SCHEMA = vol.Schema( { vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, vol.Optional(CONF_OFF_DELAY): vol.All( cv.time_period, cv.positive_timedelta, lambda value: value.total_seconds() ), vol.Optional(CONF_DATA_BITS): cv.positive_int, vol.Optional(CONF_COMMAND_ON): cv.byte, vol.Optional(CONF_COMMAND_OFF): cv.byte, vol.Optional(CONF_SIGNAL_REPETITIONS, default=1): cv.positive_int, } ) BASE_SCHEMA = vol.Schema( { vol.Optional(CONF_DEBUG): cv.boolean, vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean, vol.Optional(CONF_DEVICES, default={}): {cv.string: _ensure_device}, }, ) DEVICE_SCHEMA = BASE_SCHEMA.extend({vol.Required(CONF_DEVICE): cv.string}) PORT_SCHEMA = BASE_SCHEMA.extend( {vol.Required(CONF_PORT): cv.port, vol.Optional(CONF_HOST): cv.string} ) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.All(cv.deprecated(CONF_DEBUG), vol.Any(DEVICE_SCHEMA, PORT_SCHEMA))}, extra=vol.ALLOW_EXTRA, ) DOMAINS = ["switch", "sensor", "light", "binary_sensor", "cover"] async def async_setup(hass, config): """Set up the RFXtrx component.""" if DOMAIN not in config: return True data = { CONF_HOST: config[DOMAIN].get(CONF_HOST), CONF_PORT: config[DOMAIN].get(CONF_PORT), CONF_DEVICE: config[DOMAIN].get(CONF_DEVICE), CONF_AUTOMATIC_ADD: config[DOMAIN].get(CONF_AUTOMATIC_ADD), CONF_DEVICES: config[DOMAIN][CONF_DEVICES], } # Read device_id from the event code add to the data that will end up in the ConfigEntry for event_code, event_config in data[CONF_DEVICES].items(): event = get_rfx_object(event_code) if event is None: continue device_id = get_device_id( event.device, data_bits=event_config.get(CONF_DATA_BITS) ) event_config[CONF_DEVICE_ID] = device_id hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data, ) ) return True async def async_setup_entry(hass, entry: config_entries.ConfigEntry): """Set up the RFXtrx component.""" hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][DATA_CLEANUP_CALLBACKS] = [] await async_setup_internal(hass, entry) for domain in DOMAINS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, domain) ) return True async def async_unload_entry(hass, entry: config_entries.ConfigEntry): """Unload RFXtrx component.""" if not all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in DOMAINS ] ) ): return False hass.services.async_remove(DOMAIN, SERVICE_SEND) for cleanup_callback in hass.data[DOMAIN][DATA_CLEANUP_CALLBACKS]: cleanup_callback() listener = hass.data[DOMAIN][DATA_LISTENER] listener() rfx_object = hass.data[DOMAIN][DATA_RFXOBJECT] await hass.async_add_executor_job(rfx_object.close_connection) hass.data.pop(DOMAIN) return True def _create_rfx(config): """Construct a rfx object based on config.""" if config[CONF_PORT] is not None: # If port is set then we create a 
TCP connection rfx = rfxtrxmod.Connect( (config[CONF_HOST], config[CONF_PORT]), None, transport_protocol=rfxtrxmod.PyNetworkTransport, ) else: rfx = rfxtrxmod.Connect(config[CONF_DEVICE], None) return rfx def _get_device_lookup(devices): """Get a lookup structure for devices.""" lookup = {} for event_code, event_config in devices.items(): event = get_rfx_object(event_code) if event is None: continue device_id = get_device_id( event.device, data_bits=event_config.get(CONF_DATA_BITS) ) lookup[device_id] = event_config return lookup async def async_setup_internal(hass, entry: config_entries.ConfigEntry): """Set up the RFXtrx component.""" config = entry.data # Initialize library try: async with async_timeout.timeout(5): rfx_object = await hass.async_add_executor_job(_create_rfx, config) except asyncio.TimeoutError as err: raise ConfigEntryNotReady from err # Setup some per device config devices = _get_device_lookup(config[CONF_DEVICES]) # Declare the Handle event @callback def async_handle_receive(event): """Handle received messages from RFXtrx gateway.""" # Log RFXCOM event if not event.device.id_string: return event_data = { "packet_type": event.device.packettype, "sub_type": event.device.subtype, "type_string": event.device.type_string, "id_string": event.device.id_string, "data": binascii.hexlify(event.data).decode("ASCII"), "values": getattr(event, "values", None), } _LOGGER.debug("Receive RFXCOM event: %s", event_data) data_bits = get_device_data_bits(event.device, devices) device_id = get_device_id(event.device, data_bits=data_bits) # Register new devices if config[CONF_AUTOMATIC_ADD] and device_id not in devices: _add_device(event, device_id) # Callback to HA registered components. hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_EVENT, event, device_id) # Signal event to any other listeners fire_event = devices.get(device_id, {}).get(CONF_FIRE_EVENT) if fire_event: hass.bus.async_fire(EVENT_RFXTRX_EVENT, event_data) @callback def _add_device(event, device_id): """Add a device to config entry.""" config = DEVICE_DATA_SCHEMA({}) config[CONF_DEVICE_ID] = device_id data = entry.data.copy() data[CONF_DEVICES] = copy.deepcopy(entry.data[CONF_DEVICES]) event_code = binascii.hexlify(event.data).decode("ASCII") data[CONF_DEVICES][event_code] = config hass.config_entries.async_update_entry(entry=entry, data=data) devices[device_id] = config def _shutdown_rfxtrx(event): """Close connection with RFXtrx.""" rfx_object.close_connection() listener = hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_rfxtrx) hass.data[DOMAIN][DATA_LISTENER] = listener hass.data[DOMAIN][DATA_RFXOBJECT] = rfx_object rfx_object.event_callback = lambda event: hass.add_job(async_handle_receive, event) def send(call): event = call.data[ATTR_EVENT] rfx_object.transport.send(event) hass.services.async_register(DOMAIN, SERVICE_SEND, send, schema=SERVICE_SEND_SCHEMA) def get_rfx_object(packetid): """Return the RFXObject with the packetid.""" try: binarypacket = bytearray.fromhex(packetid) except ValueError: return None pkt = rfxtrxmod.lowlevel.parse(binarypacket) if pkt is None: return None if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket): obj = rfxtrxmod.SensorEvent(pkt) elif isinstance(pkt, rfxtrxmod.lowlevel.Status): obj = rfxtrxmod.StatusEvent(pkt) else: obj = rfxtrxmod.ControlEvent(pkt) obj.data = binarypacket return obj def get_pt2262_deviceid(device_id, nb_data_bits): """Extract and return the address bits from a Lighting4/PT2262 packet.""" if nb_data_bits is None: return try: data = 
bytearray.fromhex(device_id) except ValueError: return None mask = 0xFF & ~((1 << nb_data_bits) - 1) data[len(data) - 1] &= mask return binascii.hexlify(data) def get_pt2262_cmd(device_id, data_bits): """Extract and return the data bits from a Lighting4/PT2262 packet.""" try: data = bytearray.fromhex(device_id) except ValueError: return None mask = 0xFF & ((1 << data_bits) - 1) return hex(data[-1] & mask) def get_device_data_bits(device, devices): """Deduce data bits for device based on a cache of device bits.""" data_bits = None if device.packettype == DEVICE_PACKET_TYPE_LIGHTING4: for device_id, entity_config in devices.items(): bits = entity_config.get(CONF_DATA_BITS) if get_device_id(device, bits) == device_id: data_bits = bits break return data_bits def find_possible_pt2262_device(device_ids, device_id): """Look for the device which id matches the given device_id parameter.""" for dev_id in device_ids: if len(dev_id) == len(device_id): size = None for i, (char1, char2) in enumerate(zip(dev_id, device_id)): if char1 != char2: break size = i if size is not None: size = len(dev_id) - size - 1 _LOGGER.info( "rfxtrx: found possible device %s for %s " "with the following configuration:\n" "data_bits=%d\n" "command_on=0x%s\n" "command_off=0x%s\n", device_id, dev_id, size * 4, dev_id[-size:], device_id[-size:], ) return dev_id return None def get_device_id(device, data_bits=None): """Calculate a device id for device.""" id_string = device.id_string if data_bits and device.packettype == DEVICE_PACKET_TYPE_LIGHTING4: masked_id = get_pt2262_deviceid(id_string, data_bits) if masked_id: id_string = masked_id.decode("ASCII") return (f"{device.packettype:x}", f"{device.subtype:x}", id_string) def connect_auto_add(hass, entry_data, callback_fun): """Connect to dispatcher for automatic add.""" if entry_data[CONF_AUTOMATIC_ADD]: hass.data[DOMAIN][DATA_CLEANUP_CALLBACKS].append( hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, callback_fun) ) class RfxtrxEntity(RestoreEntity): """Represents a Rfxtrx device. Contains the common logic for Rfxtrx lights and switches. 
""" def __init__(self, device, device_id, event=None): """Initialize the device.""" self._name = f"{device.type_string} {device.id_string}" self._device = device self._event = event self._device_id = device_id self._unique_id = "_".join(x for x in self._device_id) async def async_added_to_hass(self): """Restore RFXtrx device state (ON/OFF).""" if self._event: self._apply_event(self._event) self.async_on_remove( self.hass.helpers.dispatcher.async_dispatcher_connect( SIGNAL_EVENT, self._handle_event ) ) self.async_on_remove( self.hass.helpers.dispatcher.async_dispatcher_connect( f"{DOMAIN}_{CONF_REMOVE_DEVICE}_{self._device_id}", self.async_remove ) ) @property def should_poll(self): """No polling needed for a RFXtrx switch.""" return False @property def name(self): """Return the name of the device if any.""" return self._name @property def device_state_attributes(self): """Return the device state attributes.""" if not self._event: return None return {ATTR_EVENT: "".join(f"{x:02x}" for x in self._event.data)} @property def assumed_state(self): """Return true if unable to access real state of entity.""" return True @property def unique_id(self): """Return unique identifier of remote device.""" return self._unique_id @property def device_info(self): """Return the device info.""" return { "identifiers": {(DOMAIN, *self._device_id)}, "name": f"{self._device.type_string} {self._device.id_string}", "model": self._device.type_string, } def _apply_event(self, event): """Apply a received event.""" self._event = event @callback def _handle_event(self, event, device_id): """Handle a reception of data, overridden by other classes.""" class RfxtrxCommandEntity(RfxtrxEntity): """Represents a Rfxtrx device. Contains the common logic for Rfxtrx lights and switches. """ def __init__(self, device, device_id, signal_repetitions=1, event=None): """Initialzie a switch or light device.""" super().__init__(device, device_id, event=event) self.signal_repetitions = signal_repetitions self._state = None async def _async_send(self, fun, *args): rfx_object = self.hass.data[DOMAIN][DATA_RFXOBJECT] for _ in range(self.signal_repetitions): await self.hass.async_add_executor_job(fun, rfx_object.transport, *args)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/rfxtrx/__init__.py
"""Support for the demo image processing.""" from homeassistant.components.image_processing import ( ATTR_AGE, ATTR_CONFIDENCE, ATTR_GENDER, ATTR_NAME, ImageProcessingFaceEntity, ) from homeassistant.components.openalpr_local.image_processing import ( ImageProcessingAlprEntity, ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the demo image processing platform.""" add_entities( [ DemoImageProcessingAlpr("camera.demo_camera", "Demo Alpr"), DemoImageProcessingFace("camera.demo_camera", "Demo Face"), ] ) class DemoImageProcessingAlpr(ImageProcessingAlprEntity): """Demo ALPR image processing entity.""" def __init__(self, camera_entity, name): """Initialize demo ALPR image processing entity.""" super().__init__() self._name = name self._camera = camera_entity @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def confidence(self): """Return minimum confidence for send events.""" return 80 @property def name(self): """Return the name of the entity.""" return self._name def process_image(self, image): """Process image.""" demo_data = { "AC3829": 98.3, "BE392034": 95.5, "CD02394": 93.4, "DF923043": 90.8, } self.process_plates(demo_data, 1) class DemoImageProcessingFace(ImageProcessingFaceEntity): """Demo face identify image processing entity.""" def __init__(self, camera_entity, name): """Initialize demo face image processing entity.""" super().__init__() self._name = name self._camera = camera_entity @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def confidence(self): """Return minimum confidence for send events.""" return 80 @property def name(self): """Return the name of the entity.""" return self._name def process_image(self, image): """Process image.""" demo_data = [ { ATTR_CONFIDENCE: 98.34, ATTR_NAME: "Hans", ATTR_AGE: 16.0, ATTR_GENDER: "male", }, {ATTR_NAME: "Helena", ATTR_AGE: 28.0, ATTR_GENDER: "female"}, {ATTR_CONFIDENCE: 62.53, ATTR_NAME: "Luna"}, ] self.process_faces(demo_data, 4)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/demo/image_processing.py
"""Util for Conversation.""" import re def create_matcher(utterance): """Create a regex that matches the utterance.""" # Split utterance into parts that are type: NORMAL, GROUP or OPTIONAL # Pattern matches (GROUP|OPTIONAL): Change light to [the color] {name} parts = re.split(r"({\w+}|\[[\w\s]+\] *)", utterance) # Pattern to extract name from GROUP part. Matches {name} group_matcher = re.compile(r"{(\w+)}") # Pattern to extract text from OPTIONAL part. Matches [the color] optional_matcher = re.compile(r"\[([\w ]+)\] *") pattern = ["^"] for part in parts: group_match = group_matcher.match(part) optional_match = optional_matcher.match(part) # Normal part if group_match is None and optional_match is None: pattern.append(part) continue # Group part if group_match is not None: pattern.append(r"(?P<{}>[\w ]+?)\s*".format(group_match.groups()[0])) # Optional part elif optional_match is not None: pattern.append(r"(?:{} *)?".format(optional_match.groups()[0])) pattern.append("$") return re.compile("".join(pattern), re.I)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/conversation/util.py
"""Config validation helper for the script integration.""" import asyncio import voluptuous as vol from homeassistant.config import async_log_exception from homeassistant.const import CONF_SEQUENCE from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.script import async_validate_action_config from . import DOMAIN, SCRIPT_ENTRY_SCHEMA async def async_validate_config_item(hass, config, full_config=None): """Validate config item.""" config = SCRIPT_ENTRY_SCHEMA(config) config[CONF_SEQUENCE] = await asyncio.gather( *[ async_validate_action_config(hass, action) for action in config[CONF_SEQUENCE] ] ) return config async def _try_async_validate_config_item(hass, object_id, config, full_config=None): """Validate config item.""" try: cv.slug(object_id) config = await async_validate_config_item(hass, config, full_config) except (vol.Invalid, HomeAssistantError) as ex: async_log_exception(ex, DOMAIN, full_config or config, hass) return None return config async def async_validate_config(hass, config): """Validate config.""" if DOMAIN in config: validated_config = {} for object_id, cfg in config[DOMAIN].items(): cfg = await _try_async_validate_config_item(hass, object_id, cfg, config) if cfg is not None: validated_config[object_id] = cfg config[DOMAIN] = validated_config return config
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/script/config.py
"""Support for the OpenWeatherMap (OWM) service.""" from .abstract_owm_sensor import AbstractOpenWeatherMapSensor from .const import ( ATTR_API_FORECAST, DOMAIN, ENTRY_NAME, ENTRY_WEATHER_COORDINATOR, FORECAST_MONITORED_CONDITIONS, FORECAST_SENSOR_TYPES, MONITORED_CONDITIONS, WEATHER_SENSOR_TYPES, ) from .weather_update_coordinator import WeatherUpdateCoordinator async def async_setup_entry(hass, config_entry, async_add_entities): """Set up OpenWeatherMap sensor entities based on a config entry.""" domain_data = hass.data[DOMAIN][config_entry.entry_id] name = domain_data[ENTRY_NAME] weather_coordinator = domain_data[ENTRY_WEATHER_COORDINATOR] weather_sensor_types = WEATHER_SENSOR_TYPES forecast_sensor_types = FORECAST_SENSOR_TYPES entities = [] for sensor_type in MONITORED_CONDITIONS: unique_id = f"{config_entry.unique_id}-{sensor_type}" entities.append( OpenWeatherMapSensor( name, unique_id, sensor_type, weather_sensor_types[sensor_type], weather_coordinator, ) ) for sensor_type in FORECAST_MONITORED_CONDITIONS: unique_id = f"{config_entry.unique_id}-forecast-{sensor_type}" entities.append( OpenWeatherMapForecastSensor( f"{name} Forecast", unique_id, sensor_type, forecast_sensor_types[sensor_type], weather_coordinator, ) ) async_add_entities(entities) class OpenWeatherMapSensor(AbstractOpenWeatherMapSensor): """Implementation of an OpenWeatherMap sensor.""" def __init__( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, ): """Initialize the sensor.""" super().__init__( name, unique_id, sensor_type, sensor_configuration, weather_coordinator ) self._weather_coordinator = weather_coordinator @property def state(self): """Return the state of the device.""" return self._weather_coordinator.data.get(self._sensor_type, None) class OpenWeatherMapForecastSensor(AbstractOpenWeatherMapSensor): """Implementation of an OpenWeatherMap this day forecast sensor.""" def __init__( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, ): """Initialize the sensor.""" super().__init__( name, unique_id, sensor_type, sensor_configuration, weather_coordinator ) self._weather_coordinator = weather_coordinator @property def state(self): """Return the state of the device.""" forecasts = self._weather_coordinator.data.get(ATTR_API_FORECAST) if forecasts is not None and len(forecasts) > 0: return forecasts[0].get(self._sensor_type, None) return None
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/openweathermap/sensor.py
"""Camera platform that receives images through HTTP POST.""" import asyncio from collections import deque from datetime import timedelta import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components.camera import ( PLATFORM_SCHEMA, STATE_IDLE, STATE_RECORDING, Camera, ) from homeassistant.components.camera.const import DOMAIN from homeassistant.const import CONF_NAME, CONF_TIMEOUT, CONF_WEBHOOK_ID from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.event import async_track_point_in_utc_time import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) CONF_BUFFER_SIZE = "buffer" CONF_IMAGE_FIELD = "field" DEFAULT_NAME = "Push Camera" ATTR_FILENAME = "filename" ATTR_LAST_TRIP = "last_trip" PUSH_CAMERA_DATA = "push_camera" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_BUFFER_SIZE, default=1): cv.positive_int, vol.Optional(CONF_TIMEOUT, default=timedelta(seconds=5)): vol.All( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_IMAGE_FIELD, default="image"): cv.string, vol.Required(CONF_WEBHOOK_ID): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Push Camera platform.""" if PUSH_CAMERA_DATA not in hass.data: hass.data[PUSH_CAMERA_DATA] = {} webhook_id = config.get(CONF_WEBHOOK_ID) cameras = [ PushCamera( hass, config[CONF_NAME], config[CONF_BUFFER_SIZE], config[CONF_TIMEOUT], config[CONF_IMAGE_FIELD], webhook_id, ) ] async_add_entities(cameras) async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook POST with image files.""" try: with async_timeout.timeout(5): data = dict(await request.post()) except (asyncio.TimeoutError, aiohttp.web.HTTPException) as error: _LOGGER.error("Could not get information from POST <%s>", error) return camera = hass.data[PUSH_CAMERA_DATA][webhook_id] if camera.image_field not in data: _LOGGER.warning("Webhook call without POST parameter <%s>", camera.image_field) return await camera.update_image( data[camera.image_field].file.read(), data[camera.image_field].filename ) class PushCamera(Camera): """The representation of a Push camera.""" def __init__(self, hass, name, buffer_size, timeout, image_field, webhook_id): """Initialize push camera component.""" super().__init__() self._name = name self._last_trip = None self._filename = None self._expired_listener = None self._state = STATE_IDLE self._timeout = timeout self.queue = deque([], buffer_size) self._current_image = None self._image_field = image_field self.webhook_id = webhook_id self.webhook_url = hass.components.webhook.async_generate_url(webhook_id) async def async_added_to_hass(self): """Call when entity is added to hass.""" self.hass.data[PUSH_CAMERA_DATA][self.webhook_id] = self try: self.hass.components.webhook.async_register( DOMAIN, self.name, self.webhook_id, handle_webhook ) except ValueError: _LOGGER.error( "In <%s>, webhook_id <%s> already used", self.name, self.webhook_id ) @property def image_field(self): """HTTP field containing the image file.""" return self._image_field @property def state(self): """Return current state of the camera.""" return self._state async def update_image(self, image, filename): """Update the camera image.""" if self._state == STATE_IDLE: self._state = STATE_RECORDING self._last_trip = dt_util.utcnow() self.queue.clear() self._filename = filename 
self.queue.appendleft(image) @callback def reset_state(now): """Set state to idle after no new images for a period of time.""" self._state = STATE_IDLE self._expired_listener = None _LOGGER.debug("Reset state") self.async_write_ha_state() if self._expired_listener: self._expired_listener() self._expired_listener = async_track_point_in_utc_time( self.hass, reset_state, dt_util.utcnow() + self._timeout ) self.async_write_ha_state() async def async_camera_image(self): """Return a still image response.""" if self.queue: if self._state == STATE_IDLE: self.queue.rotate(1) self._current_image = self.queue[0] return self._current_image @property def name(self): """Return the name of this camera.""" return self._name @property def motion_detection_enabled(self): """Camera Motion Detection Status.""" return False @property def device_state_attributes(self): """Return the state attributes.""" return { name: value for name, value in ( (ATTR_LAST_TRIP, self._last_trip), (ATTR_FILENAME, self._filename), ) if value is not None }
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/push/camera.py
"""Support Wink alarm control panels.""" import pywink import homeassistant.components.alarm_control_panel as alarm from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, ) from homeassistant.const import ( STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, ) from . import DOMAIN, WinkDevice STATE_ALARM_PRIVACY = "Private" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Wink platform.""" for camera in pywink.get_cameras(): # get_cameras returns multiple device types. # Only add those that aren't sensors. try: camera.capability() except AttributeError: _id = camera.object_id() + camera.name() if _id not in hass.data[DOMAIN]["unique_ids"]: add_entities([WinkCameraDevice(camera, hass)]) class WinkCameraDevice(WinkDevice, alarm.AlarmControlPanelEntity): """Representation a Wink camera alarm.""" async def async_added_to_hass(self): """Call when entity is added to hass.""" self.hass.data[DOMAIN]["entities"]["alarm_control_panel"].append(self) @property def state(self): """Return the state of the device.""" wink_state = self.wink.state() if wink_state == "away": state = STATE_ALARM_ARMED_AWAY elif wink_state == "home": state = STATE_ALARM_DISARMED elif wink_state == "night": state = STATE_ALARM_ARMED_HOME else: state = None return state @property def supported_features(self) -> int: """Return the list of supported features.""" return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY def alarm_disarm(self, code=None): """Send disarm command.""" self.wink.set_mode("home") def alarm_arm_home(self, code=None): """Send arm home command.""" self.wink.set_mode("night") def alarm_arm_away(self, code=None): """Send arm away command.""" self.wink.set_mode("away") @property def device_state_attributes(self): """Return the state attributes.""" return {"private": self.wink.private()}
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/wink/alarm_control_panel.py
"""Support for LaMetric time.""" import logging from lmnotify import LaMetricManager import voluptuous as vol from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "lametric" LAMETRIC_DEVICES = "LAMETRIC_DEVICES" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up the LaMetricManager.""" _LOGGER.debug("Setting up LaMetric platform") conf = config[DOMAIN] hlmn = HassLaMetricManager( client_id=conf[CONF_CLIENT_ID], client_secret=conf[CONF_CLIENT_SECRET] ) devices = hlmn.manager.get_devices() if not devices: _LOGGER.error("No LaMetric devices found") return False hass.data[DOMAIN] = hlmn for dev in devices: _LOGGER.debug("Discovered LaMetric device: %s", dev) return True class HassLaMetricManager: """A class that encapsulated requests to the LaMetric manager.""" def __init__(self, client_id, client_secret): """Initialize HassLaMetricManager and connect to LaMetric.""" _LOGGER.debug("Connecting to LaMetric") self.manager = LaMetricManager(client_id, client_secret) self._client_id = client_id self._client_secret = client_secret
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/lametric/__init__.py
"""Support for the Pico TTS speech service.""" import logging import os import shutil import subprocess import tempfile import voluptuous as vol from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider _LOGGER = logging.getLogger(__name__) SUPPORT_LANGUAGES = ["en-US", "en-GB", "de-DE", "es-ES", "fr-FR", "it-IT"] DEFAULT_LANG = "en-US" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) def get_engine(hass, config, discovery_info=None): """Set up Pico speech component.""" if shutil.which("pico2wave") is None: _LOGGER.error("'pico2wave' was not found") return False return PicoProvider(config[CONF_LANG]) class PicoProvider(Provider): """The Pico TTS API provider.""" def __init__(self, lang): """Initialize Pico TTS provider.""" self._lang = lang self.name = "PicoTTS" @property def default_language(self): """Return the default language.""" return self._lang @property def supported_languages(self): """Return list of supported languages.""" return SUPPORT_LANGUAGES def get_tts_audio(self, message, language, options=None): """Load TTS using pico2wave.""" with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as tmpf: fname = tmpf.name cmd = ["pico2wave", "--wave", fname, "-l", language, message] subprocess.call(cmd) data = None try: with open(fname, "rb") as voice: data = voice.read() except OSError: _LOGGER.error("Error trying to read %s", fname) return (None, None) finally: os.remove(fname) if data: return ("wav", data) return (None, None)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/picotts/tts.py
"""Support for HDMI CEC devices as switches.""" import logging from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.const import STATE_OFF, STATE_ON, STATE_STANDBY from . import ATTR_NEW, CecEntity _LOGGER = logging.getLogger(__name__) ENTITY_ID_FORMAT = DOMAIN + ".{}" def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return HDMI devices as switches.""" if ATTR_NEW in discovery_info: _LOGGER.info("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) entities = [] for device in discovery_info[ATTR_NEW]: hdmi_device = hass.data.get(device) entities.append(CecSwitchEntity(hdmi_device, hdmi_device.logical_address)) add_entities(entities, True) class CecSwitchEntity(CecEntity, SwitchEntity): """Representation of a HDMI device as a Switch.""" def __init__(self, device, logical) -> None: """Initialize the HDMI device.""" CecEntity.__init__(self, device, logical) self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}" def turn_on(self, **kwargs) -> None: """Turn device on.""" self._device.turn_on() self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) def turn_off(self, **kwargs) -> None: """Turn device off.""" self._device.turn_off() self._state = STATE_OFF self.schedule_update_ha_state(force_refresh=False) def toggle(self, **kwargs): """Toggle the entity.""" self._device.toggle() if self._state == STATE_ON: self._state = STATE_OFF else: self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) @property def is_on(self) -> bool: """Return True if entity is on.""" return self._state == STATE_ON @property def is_standby(self): """Return true if device is in standby.""" return self._state == STATE_OFF or self._state == STATE_STANDBY @property def state(self) -> str: """Return the cached state of device.""" return self._state
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/hdmi_cec/switch.py
"""Support for Västtrafik public transport.""" from datetime import timedelta import logging import vasttrafik import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from homeassistant.util.dt import now _LOGGER = logging.getLogger(__name__) ATTR_ACCESSIBILITY = "accessibility" ATTR_DIRECTION = "direction" ATTR_LINE = "line" ATTR_TRACK = "track" ATTRIBUTION = "Data provided by Västtrafik" CONF_DELAY = "delay" CONF_DEPARTURES = "departures" CONF_FROM = "from" CONF_HEADING = "heading" CONF_LINES = "lines" CONF_KEY = "key" CONF_SECRET = "secret" DEFAULT_DELAY = 0 ICON = "mdi:train" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_KEY): cv.string, vol.Required(CONF_SECRET): cv.string, vol.Optional(CONF_DEPARTURES): [ { vol.Required(CONF_FROM): cv.string, vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): cv.positive_int, vol.Optional(CONF_HEADING): cv.string, vol.Optional(CONF_LINES, default=[]): vol.All( cv.ensure_list, [cv.string] ), vol.Optional(CONF_NAME): cv.string, } ], } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the departure sensor.""" planner = vasttrafik.JournyPlanner(config.get(CONF_KEY), config.get(CONF_SECRET)) sensors = [] for departure in config.get(CONF_DEPARTURES): sensors.append( VasttrafikDepartureSensor( planner, departure.get(CONF_NAME), departure.get(CONF_FROM), departure.get(CONF_HEADING), departure.get(CONF_LINES), departure.get(CONF_DELAY), ) ) add_entities(sensors, True) class VasttrafikDepartureSensor(Entity): """Implementation of a Vasttrafik Departure Sensor.""" def __init__(self, planner, name, departure, heading, lines, delay): """Initialize the sensor.""" self._planner = planner self._name = name or departure self._departure = planner.location_name(departure)[0] self._heading = planner.location_name(heading)[0] if heading else None self._lines = lines if lines else None self._delay = timedelta(minutes=delay) self._departureboard = None self._state = None self._attributes = None @property def name(self): """Return the name of the sensor.""" return self._name @property def icon(self): """Return the icon for the frontend.""" return ICON @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @property def state(self): """Return the next departure time.""" return self._state @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the departure board.""" try: self._departureboard = self._planner.departureboard( self._departure["id"], direction=self._heading["id"] if self._heading else None, date=now() + self._delay, ) except vasttrafik.Error: _LOGGER.debug("Unable to read departure board, updating token") self._planner.update_token() if not self._departureboard: _LOGGER.debug( "No departures from %s heading %s", self._departure["name"], self._heading["name"] if self._heading else "ANY", ) self._state = None self._attributes = {} else: for departure in self._departureboard: line = departure.get("sname") if not self._lines or line in self._lines: if "rtTime" in self._departureboard[0]: self._state = self._departureboard[0]["rtTime"] else: self._state = self._departureboard[0]["time"] params = { ATTR_ACCESSIBILITY: departure.get("accessibility"), ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_DIRECTION: 
departure.get("direction"), ATTR_LINE: departure.get("sname"), ATTR_TRACK: departure.get("track"), } self._attributes = {k: v for k, v in params.items() if v} break
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/vasttrafik/sensor.py
"""Config flow to configure Met component.""" from typing import Any, Dict, Optional import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from .const import CONF_TRACK_HOME, DOMAIN, HOME_LOCATION_NAME @callback def configured_instances(hass): """Return a set of configured SimpliSafe instances.""" entries = [] for entry in hass.config_entries.async_entries(DOMAIN): if entry.data.get("track_home"): entries.append("home") continue entries.append( f"{entry.data.get(CONF_LATITUDE)}-{entry.data.get(CONF_LONGITUDE)}" ) return set(entries) class MetFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Config flow for Met component.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Init MetFlowHandler.""" self._errors = {} async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" self._errors = {} if user_input is not None: if ( f"{user_input.get(CONF_LATITUDE)}-{user_input.get(CONF_LONGITUDE)}" not in configured_instances(self.hass) ): return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) self._errors[CONF_NAME] = "already_configured" return await self._show_config_form( name=HOME_LOCATION_NAME, latitude=self.hass.config.latitude, longitude=self.hass.config.longitude, elevation=self.hass.config.elevation, ) async def _show_config_form( self, name=None, latitude=None, longitude=None, elevation=None ): """Show the configuration form to edit location data.""" return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_NAME, default=name): str, vol.Required(CONF_LATITUDE, default=latitude): cv.latitude, vol.Required(CONF_LONGITUDE, default=longitude): cv.longitude, vol.Required(CONF_ELEVATION, default=elevation): int, } ), errors=self._errors, ) async def async_step_import( self, user_input: Optional[Dict] = None ) -> Dict[str, Any]: """Handle configuration by yaml file.""" return await self.async_step_user(user_input) async def async_step_onboarding(self, data=None): """Handle a flow initialized by onboarding.""" return self.async_create_entry( title=HOME_LOCATION_NAME, data={CONF_TRACK_HOME: True} )
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/met/config_flow.py
"""Support for Fibaro binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_DOOR, DEVICE_CLASS_MOTION, DEVICE_CLASS_SMOKE, DEVICE_CLASS_WINDOW, DOMAIN, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON from . import FIBARO_DEVICES, FibaroDevice SENSOR_TYPES = { "com.fibaro.floodSensor": ["Flood", "mdi:water", "flood"], "com.fibaro.motionSensor": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.doorSensor": ["Door", "mdi:window-open", DEVICE_CLASS_DOOR], "com.fibaro.windowSensor": ["Window", "mdi:window-open", DEVICE_CLASS_WINDOW], "com.fibaro.smokeSensor": ["Smoke", "mdi:smoking", DEVICE_CLASS_SMOKE], "com.fibaro.FGMS001": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.heatDetector": ["Heat", "mdi:fire", "heat"], } def setup_platform(hass, config, add_entities, discovery_info=None): """Perform the setup for Fibaro controller devices.""" if discovery_info is None: return add_entities( [ FibaroBinarySensor(device) for device in hass.data[FIBARO_DEVICES]["binary_sensor"] ], True, ) class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): """Representation of a Fibaro Binary Sensor.""" def __init__(self, fibaro_device): """Initialize the binary_sensor.""" self._state = None super().__init__(fibaro_device) self.entity_id = f"{DOMAIN}.{self.ha_id}" stype = None devconf = fibaro_device.device_config if fibaro_device.type in SENSOR_TYPES: stype = fibaro_device.type elif fibaro_device.baseType in SENSOR_TYPES: stype = fibaro_device.baseType if stype: self._device_class = SENSOR_TYPES[stype][2] self._icon = SENSOR_TYPES[stype][1] else: self._device_class = None self._icon = None # device_config overrides: self._device_class = devconf.get(CONF_DEVICE_CLASS, self._device_class) self._icon = devconf.get(CONF_ICON, self._icon) @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_class(self): """Return the device class of the sensor.""" return self._device_class @property def is_on(self): """Return true if sensor is on.""" return self._state def update(self): """Get the latest data and update the state.""" self._state = self.current_binary_state
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/fibaro/binary_sensor.py
"""Pushbullet platform for notify component.""" import logging import mimetypes from pushbullet import InvalidKeyError, PushBullet, PushError import voluptuous as vol from homeassistant.components.notify import ( ATTR_DATA, ATTR_TARGET, ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import CONF_API_KEY import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_URL = "url" ATTR_FILE = "file" ATTR_FILE_URL = "file_url" ATTR_LIST = "list" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_API_KEY): cv.string}) def get_service(hass, config, discovery_info=None): """Get the Pushbullet notification service.""" try: pushbullet = PushBullet(config[CONF_API_KEY]) except InvalidKeyError: _LOGGER.error("Wrong API key supplied") return None return PushBulletNotificationService(pushbullet) class PushBulletNotificationService(BaseNotificationService): """Implement the notification service for Pushbullet.""" def __init__(self, pb): """Initialize the service.""" self.pushbullet = pb self.pbtargets = {} self.refresh() def refresh(self): """Refresh devices, contacts, etc. pbtargets stores all targets available from this Pushbullet instance into a dict. These are Pushbullet objects!. It sacrifices a bit of memory for faster processing at send_message. As of sept 2015, contacts were replaced by chats. This is not implemented in the module yet. """ self.pushbullet.refresh() self.pbtargets = { "device": {tgt.nickname.lower(): tgt for tgt in self.pushbullet.devices}, "channel": { tgt.channel_tag.lower(): tgt for tgt in self.pushbullet.channels }, } def send_message(self, message=None, **kwargs): """Send a message to a specified target. If no target specified, a 'normal' push will be sent to all devices linked to the Pushbullet account. Email is special, these are assumed to always exist. We use a special call which doesn't require a push object. """ targets = kwargs.get(ATTR_TARGET) title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) data = kwargs.get(ATTR_DATA) refreshed = False if not targets: # Backward compatibility, notify all devices in own account. self._push_data(message, title, data, self.pushbullet) _LOGGER.info("Sent notification to self") return # Main loop, process all targets specified. for target in targets: try: ttype, tname = target.split("/", 1) except ValueError: _LOGGER.error("Invalid target syntax: %s", target) continue # Target is email, send directly, don't use a target object. # This also seems to work to send to all devices in own account. if ttype == "email": self._push_data(message, title, data, self.pushbullet, email=tname) _LOGGER.info("Sent notification to email %s", tname) continue # Target is sms, send directly, don't use a target object. if ttype == "sms": self._push_data( message, title, data, self.pushbullet, phonenumber=tname ) _LOGGER.info("Sent sms notification to %s", tname) continue # Refresh if name not found. While awaiting periodic refresh # solution in component, poor mans refresh. if ttype not in self.pbtargets: _LOGGER.error("Invalid target syntax: %s", target) continue tname = tname.lower() if tname not in self.pbtargets[ttype] and not refreshed: self.refresh() refreshed = True # Attempt push_note on a dict value. Keys are types & target # name. Dict pbtargets has all *actual* targets. 
try: self._push_data(message, title, data, self.pbtargets[ttype][tname]) _LOGGER.info("Sent notification to %s/%s", ttype, tname) except KeyError: _LOGGER.error("No such target: %s/%s", ttype, tname) continue def _push_data(self, message, title, data, pusher, email=None, phonenumber=None): """Create the message content.""" if data is None: data = {} data_list = data.get(ATTR_LIST) url = data.get(ATTR_URL) filepath = data.get(ATTR_FILE) file_url = data.get(ATTR_FILE_URL) try: email_kwargs = {} if email: email_kwargs["email"] = email if phonenumber: device = pusher.devices[0] pusher.push_sms(device, phonenumber, message) elif url: pusher.push_link(title, url, body=message, **email_kwargs) elif filepath: if not self.hass.config.is_allowed_path(filepath): _LOGGER.error("Filepath is not valid or allowed") return with open(filepath, "rb") as fileh: filedata = self.pushbullet.upload_file(fileh, filepath) if filedata.get("file_type") == "application/x-empty": _LOGGER.error("Can not send an empty file") return filedata.update(email_kwargs) pusher.push_file(title=title, body=message, **filedata) elif file_url: if not file_url.startswith("http"): _LOGGER.error("URL should start with http or https") return pusher.push_file( title=title, body=message, file_name=file_url, file_url=file_url, file_type=(mimetypes.guess_type(file_url)[0]), **email_kwargs, ) elif data_list: pusher.push_list(title, data_list, **email_kwargs) else: pusher.push_note(title, message, **email_kwargs) except PushError as err: _LOGGER.error("Notify failed: %s", err)
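A minimal sketch (not the component's code) of the target convention handled in send_message above: each target is "type/name", where "email" and "sms" are pushed directly and every other type is looked up in the refreshed pbtargets dict by lower-cased name. The parse_target helper below is hypothetical.

def parse_target(target):
    """Split a notify target like 'device/kitchen' and flag direct push types."""
    try:
        ttype, tname = target.split("/", 1)
    except ValueError:
        raise ValueError(f"Invalid target syntax: {target}") from None
    # Direct push types skip the pbtargets lookup, as in send_message above.
    direct = ttype in ("email", "sms")
    if not direct:
        tname = tname.lower()
    return ttype, tname, direct


print(parse_target("device/Kitchen Tablet"))   # ('device', 'kitchen tablet', False)
print(parse_target("email/user@example.com"))  # ('email', 'user@example.com', True)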
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/pushbullet/notify.py
"""Config flow to configure the Synology DSM integration.""" import logging from urllib.parse import urlparse from synology_dsm import SynologyDSM from synology_dsm.exceptions import ( SynologyDSMException, SynologyDSMLogin2SAFailedException, SynologyDSMLogin2SARequiredException, SynologyDSMLoginInvalidException, SynologyDSMRequestException, ) import voluptuous as vol from homeassistant import config_entries, exceptions from homeassistant.components import ssdp from homeassistant.const import ( CONF_DISKS, CONF_HOST, CONF_MAC, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, CONF_TIMEOUT, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from .const import ( CONF_VOLUMES, DEFAULT_PORT, DEFAULT_PORT_SSL, DEFAULT_SCAN_INTERVAL, DEFAULT_TIMEOUT, DEFAULT_USE_SSL, DEFAULT_VERIFY_SSL, ) from .const import DOMAIN # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) CONF_OTP_CODE = "otp_code" def _discovery_schema_with_defaults(discovery_info): return vol.Schema(_ordered_shared_schema(discovery_info)) def _user_schema_with_defaults(user_input): user_schema = { vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str, } user_schema.update(_ordered_shared_schema(user_input)) return vol.Schema(user_schema) def _ordered_shared_schema(schema_input): return { vol.Required(CONF_USERNAME, default=schema_input.get(CONF_USERNAME, "")): str, vol.Required(CONF_PASSWORD, default=schema_input.get(CONF_PASSWORD, "")): str, vol.Optional(CONF_PORT, default=schema_input.get(CONF_PORT, "")): str, vol.Optional( CONF_SSL, default=schema_input.get(CONF_SSL, DEFAULT_USE_SSL) ): bool, vol.Optional( CONF_VERIFY_SSL, default=schema_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL), ): bool, } class SynologyDSMFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return SynologyDSMOptionsFlowHandler(config_entry) def __init__(self): """Initialize the synology_dsm config flow.""" self.saved_user_input = {} self.discovered_conf = {} async def _show_setup_form(self, user_input=None, errors=None): """Show the setup form to the user.""" if not user_input: user_input = {} if self.discovered_conf: user_input.update(self.discovered_conf) step_id = "link" data_schema = _discovery_schema_with_defaults(user_input) else: step_id = "user" data_schema = _user_schema_with_defaults(user_input) return self.async_show_form( step_id=step_id, data_schema=data_schema, errors=errors or {}, description_placeholders=self.discovered_conf or {}, ) async def async_step_user(self, user_input=None): """Handle a flow initiated by the user.""" errors = {} if user_input is None: return await self._show_setup_form(user_input, None) if self.discovered_conf: user_input.update(self.discovered_conf) host = user_input[CONF_HOST] port = user_input.get(CONF_PORT) username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] use_ssl = user_input.get(CONF_SSL, DEFAULT_USE_SSL) verify_ssl = user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL) otp_code = user_input.get(CONF_OTP_CODE) if not port: if use_ssl is True: port = DEFAULT_PORT_SSL else: port = DEFAULT_PORT api = SynologyDSM( host, port, username, password, use_ssl, verify_ssl, timeout=30 ) try: serial = await self.hass.async_add_executor_job( _login_and_fetch_syno_info, api, 
otp_code ) except SynologyDSMLogin2SARequiredException: return await self.async_step_2sa(user_input) except SynologyDSMLogin2SAFailedException: errors[CONF_OTP_CODE] = "otp_failed" user_input[CONF_OTP_CODE] = None return await self.async_step_2sa(user_input, errors) except SynologyDSMLoginInvalidException as ex: _LOGGER.error(ex) errors[CONF_USERNAME] = "invalid_auth" except SynologyDSMRequestException as ex: _LOGGER.error(ex) errors[CONF_HOST] = "cannot_connect" except SynologyDSMException as ex: _LOGGER.error(ex) errors["base"] = "unknown" except InvalidData: errors["base"] = "missing_data" if errors: return await self._show_setup_form(user_input, errors) # Check if already configured await self.async_set_unique_id(serial, raise_on_progress=False) self._abort_if_unique_id_configured() config_data = { CONF_HOST: host, CONF_PORT: port, CONF_SSL: use_ssl, CONF_VERIFY_SSL: verify_ssl, CONF_USERNAME: username, CONF_PASSWORD: password, CONF_MAC: api.network.macs, } if otp_code: config_data["device_token"] = api.device_token if user_input.get(CONF_DISKS): config_data[CONF_DISKS] = user_input[CONF_DISKS] if user_input.get(CONF_VOLUMES): config_data[CONF_VOLUMES] = user_input[CONF_VOLUMES] return self.async_create_entry(title=host, data=config_data) async def async_step_ssdp(self, discovery_info): """Handle a discovered synology_dsm.""" parsed_url = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION]) friendly_name = ( discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME].split("(", 1)[0].strip() ) mac = discovery_info[ssdp.ATTR_UPNP_SERIAL].upper() # Synology NAS can broadcast on multiple IP addresses, since they can be connected to multiple ethernets. # The serial of the NAS is actually its MAC address. if self._mac_already_configured(mac): return self.async_abort(reason="already_configured") await self.async_set_unique_id(mac) self._abort_if_unique_id_configured() self.discovered_conf = { CONF_NAME: friendly_name, CONF_HOST: parsed_url.hostname, } # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context["title_placeholders"] = self.discovered_conf return await self.async_step_user() async def async_step_import(self, user_input=None): """Import a config entry.""" return await self.async_step_user(user_input) async def async_step_link(self, user_input): """Link a config entry from discovery.""" return await self.async_step_user(user_input) async def async_step_2sa(self, user_input, errors=None): """Enter 2SA code to anthenticate.""" if not self.saved_user_input: self.saved_user_input = user_input if not user_input.get(CONF_OTP_CODE): return self.async_show_form( step_id="2sa", data_schema=vol.Schema({vol.Required(CONF_OTP_CODE): str}), errors=errors or {}, ) user_input = {**self.saved_user_input, **user_input} self.saved_user_input = {} return await self.async_step_user(user_input) def _mac_already_configured(self, mac): """See if we already have configured a NAS with this MAC address.""" existing_macs = [ mac.replace("-", "") for entry in self._async_current_entries() for mac in entry.data.get(CONF_MAC, []) ] return mac in existing_macs class SynologyDSMOptionsFlowHandler(config_entries.OptionsFlow): """Handle a option flow.""" def __init__(self, config_entry: config_entries.ConfigEntry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) data_schema = vol.Schema( { vol.Optional( CONF_SCAN_INTERVAL, 
default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): cv.positive_int, vol.Optional( CONF_TIMEOUT, default=self.config_entry.options.get( CONF_TIMEOUT, DEFAULT_TIMEOUT ), ): cv.positive_int, } ) return self.async_show_form(step_id="init", data_schema=data_schema) def _login_and_fetch_syno_info(api, otp_code): """Login to the NAS and fetch basic data.""" # These do i/o api.login(otp_code) api.utilisation.update() api.storage.update() api.network.update() if ( not api.information.serial or api.utilisation.cpu_user_load is None or not api.storage.volumes_ids or not api.network.macs ): raise InvalidData return api.information.serial class InvalidData(exceptions.HomeAssistantError): """Error to indicate we get invalid data from the nas."""
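A small stand-alone sketch (assumed helper and key names, not the integration's actual code) of the MAC-based de-duplication idea behind _mac_already_configured above; here both sides are normalized by stripping dashes and upper-casing before comparison.

def mac_already_configured(existing_entries, discovered_mac):
    """Return True if a NAS with this MAC address is already configured."""
    existing = {
        mac.replace("-", "").upper()
        for entry in existing_entries
        for mac in entry.get("macs", [])
    }
    return discovered_mac.replace("-", "").upper() in existing


entries = [{"macs": ["00-11-32-AA-BB-CC"]}]
print(mac_already_configured(entries, "001132aabbcc"))      # True
print(mac_already_configured(entries, "00-11-32-DD-EE-FF"))  # False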
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/synology_dsm/config_flow.py
"""Plugged In Status Support for the Nissan Leaf.""" import logging from homeassistant.components.binary_sensor import BinarySensorEntity from . import DATA_CHARGING, DATA_LEAF, DATA_PLUGGED_IN, LeafEntity _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up of a Nissan Leaf binary sensor.""" if discovery_info is None: return devices = [] for vin, datastore in hass.data[DATA_LEAF].items(): _LOGGER.debug("Adding binary_sensors for vin=%s", vin) devices.append(LeafPluggedInSensor(datastore)) devices.append(LeafChargingSensor(datastore)) add_entities(devices, True) class LeafPluggedInSensor(LeafEntity, BinarySensorEntity): """Plugged In Sensor class.""" @property def name(self): """Sensor name.""" return f"{self.car.leaf.nickname} Plug Status" @property def is_on(self): """Return true if plugged in.""" return self.car.data[DATA_PLUGGED_IN] @property def icon(self): """Icon handling.""" if self.car.data[DATA_PLUGGED_IN]: return "mdi:power-plug" return "mdi:power-plug-off" class LeafChargingSensor(LeafEntity, BinarySensorEntity): """Charging Sensor class.""" @property def name(self): """Sensor name.""" return f"{self.car.leaf.nickname} Charging Status" @property def is_on(self): """Return true if charging.""" return self.car.data[DATA_CHARGING] @property def icon(self): """Icon handling.""" if self.car.data[DATA_CHARGING]: return "mdi:flash" return "mdi:flash-off"
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/nissan_leaf/binary_sensor.py
"""Errors for the Acmeda Pulse component.""" from homeassistant.exceptions import HomeAssistantError class PulseException(HomeAssistantError): """Base class for Acmeda Pulse exceptions.""" class CannotConnect(PulseException): """Unable to connect to the bridge."""
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/acmeda/errors.py
"""Provides device actions for remotes.""" from typing import List import voluptuous as vol from homeassistant.components.device_automation import toggle_entity from homeassistant.const import CONF_DOMAIN from homeassistant.core import Context, HomeAssistant from homeassistant.helpers.typing import ConfigType, TemplateVarsType from . import DOMAIN ACTION_SCHEMA = toggle_entity.ACTION_SCHEMA.extend({vol.Required(CONF_DOMAIN): DOMAIN}) async def async_call_action_from_config( hass: HomeAssistant, config: ConfigType, variables: TemplateVarsType, context: Context, ) -> None: """Change state based on configuration.""" await toggle_entity.async_call_action_from_config( hass, config, variables, context, DOMAIN ) async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]: """List device actions.""" return await toggle_entity.async_get_actions(hass, device_id, DOMAIN)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/remote/device_action.py
"""Support for the Twitch stream status.""" import logging from requests.exceptions import HTTPError from twitch import TwitchClient import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_CLIENT_ID, CONF_TOKEN import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) ATTR_GAME = "game" ATTR_TITLE = "title" ATTR_SUBSCRIPTION = "subscribed" ATTR_SUBSCRIPTION_SINCE = "subscribed_since" ATTR_SUBSCRIPTION_GIFTED = "subscription_is_gifted" ATTR_FOLLOW = "following" ATTR_FOLLOW_SINCE = "following_since" ATTR_FOLLOWING = "followers" ATTR_VIEWS = "views" CONF_CHANNELS = "channels" ICON = "mdi:twitch" STATE_OFFLINE = "offline" STATE_STREAMING = "streaming" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CHANNELS): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_TOKEN): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Twitch platform.""" channels = config[CONF_CHANNELS] client_id = config[CONF_CLIENT_ID] oauth_token = config.get(CONF_TOKEN) client = TwitchClient(client_id, oauth_token) try: client.ingests.get_server_list() except HTTPError: _LOGGER.error("Client ID or OAuth token is not valid") return channel_ids = client.users.translate_usernames_to_ids(channels) add_entities([TwitchSensor(channel_id, client) for channel_id in channel_ids], True) class TwitchSensor(Entity): """Representation of an Twitch channel.""" def __init__(self, channel, client): """Initialize the sensor.""" self._client = client self._channel = channel self._oauth_enabled = client._oauth_token is not None self._state = None self._preview = None self._game = None self._title = None self._subscription = None self._follow = None self._statistics = None @property def name(self): """Return the name of the sensor.""" return self._channel.display_name @property def state(self): """Return the state of the sensor.""" return self._state @property def entity_picture(self): """Return preview of current game.""" return self._preview @property def device_state_attributes(self): """Return the state attributes.""" attr = dict(self._statistics) if self._oauth_enabled: attr.update(self._subscription) attr.update(self._follow) if self._state == STATE_STREAMING: attr.update({ATTR_GAME: self._game, ATTR_TITLE: self._title}) return attr @property def unique_id(self): """Return unique ID for this sensor.""" return self._channel.id @property def icon(self): """Icon to use in the frontend, if any.""" return ICON def update(self): """Update device state.""" channel = self._client.channels.get_by_id(self._channel.id) self._statistics = { ATTR_FOLLOWING: channel.followers, ATTR_VIEWS: channel.views, } if self._oauth_enabled: user = self._client.users.get() try: sub = self._client.users.check_subscribed_to_channel( user.id, self._channel.id ) self._subscription = { ATTR_SUBSCRIPTION: True, ATTR_SUBSCRIPTION_SINCE: sub.created_at, ATTR_SUBSCRIPTION_GIFTED: sub.is_gift, } except HTTPError: self._subscription = {ATTR_SUBSCRIPTION: False} try: follow = self._client.users.check_follows_channel( user.id, self._channel.id ) self._follow = {ATTR_FOLLOW: True, ATTR_FOLLOW_SINCE: follow.created_at} except HTTPError: self._follow = {ATTR_FOLLOW: False} stream = self._client.streams.get_stream_by_user(self._channel.id) if stream: self._game = stream.channel.get("game") self._title = stream.channel.get("status") 
self._preview = stream.preview.get("medium") self._state = STATE_STREAMING else: self._preview = self._channel.logo self._state = STATE_OFFLINE
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/twitch/sensor.py
"""Fans on Zigbee Home Automation networks.""" import functools from typing import List from zigpy.exceptions import ZigbeeException import zigpy.zcl.clusters.hvac as hvac from homeassistant.components.fan import ( DOMAIN, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SPEED_OFF, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core import discovery from .core.const import ( CHANNEL_FAN, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED, ) from .core.registries import ZHA_ENTITIES from .entity import ZhaEntity, ZhaGroupEntity # Additional speeds in zigbee's ZCL # Spec is unclear as to what this value means. On King Of Fans HBUniversal # receiver, this means Very High. SPEED_ON = "on" # The fan speed is self-regulated SPEED_AUTO = "auto" # When the heated/cooled space is occupied, the fan is always on SPEED_SMART = "smart" SPEED_LIST = [ SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, SPEED_ON, SPEED_AUTO, SPEED_SMART, ] VALUE_TO_SPEED = dict(enumerate(SPEED_LIST)) SPEED_TO_VALUE = {speed: i for i, speed in enumerate(SPEED_LIST)} STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN) GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, DOMAIN) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation fan from config entry.""" entities_to_create = hass.data[DATA_ZHA][DOMAIN] unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( discovery.async_add_entities, async_add_entities, entities_to_create ), ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) class BaseFan(FanEntity): """Base representation of a ZHA fan.""" def __init__(self, *args, **kwargs): """Initialize the fan.""" super().__init__(*args, **kwargs) self._state = None self._fan_channel = None @property def speed_list(self) -> list: """Get the list of available speeds.""" return SPEED_LIST @property def speed(self) -> str: """Return the current speed.""" return self._state @property def is_on(self) -> bool: """Return true if entity is on.""" if self._state is None: return False return self._state != SPEED_OFF @property def supported_features(self) -> int: """Flag supported features.""" return SUPPORT_SET_SPEED async def async_turn_on(self, speed: str = None, **kwargs) -> None: """Turn the entity on.""" if speed is None: speed = SPEED_MEDIUM await self.async_set_speed(speed) async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" await self.async_set_speed(SPEED_OFF) async def async_set_speed(self, speed: str) -> None: """Set the speed of the fan.""" await self._fan_channel.async_set_speed(SPEED_TO_VALUE[speed]) self.async_set_state(0, "fan_mode", speed) @callback def async_set_state(self, attr_id, attr_name, value): """Handle state update from channel.""" @STRICT_MATCH(channel_names=CHANNEL_FAN) class ZhaFan(BaseFan, ZhaEntity): """Representation of a ZHA fan.""" def __init__(self, unique_id, zha_device, channels, **kwargs): """Init this sensor.""" super().__init__(unique_id, zha_device, channels, **kwargs) self._fan_channel = self.cluster_channels.get(CHANNEL_FAN) async def async_added_to_hass(self): """Run when about to be added to hass.""" await super().async_added_to_hass() self.async_accept_signal( self._fan_channel, SIGNAL_ATTR_UPDATED, self.async_set_state ) @callback def async_restore_last_state(self, last_state): """Restore previous 
state.""" self._state = VALUE_TO_SPEED.get(last_state.state, last_state.state) @callback def async_set_state(self, attr_id, attr_name, value): """Handle state update from channel.""" self._state = VALUE_TO_SPEED.get(value, self._state) self.async_write_ha_state() async def async_update(self): """Attempt to retrieve on off state from the fan.""" await super().async_update() if self._fan_channel: state = await self._fan_channel.get_attribute_value("fan_mode") if state is not None: self._state = VALUE_TO_SPEED.get(state, self._state) @GROUP_MATCH() class FanGroup(BaseFan, ZhaGroupEntity): """Representation of a fan group.""" def __init__( self, entity_ids: List[str], unique_id: str, group_id: int, zha_device, **kwargs ) -> None: """Initialize a fan group.""" super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) self._available: bool = False group = self.zha_device.gateway.get_group(self._group_id) self._fan_channel = group.endpoint[hvac.Fan.cluster_id] # what should we do with this hack? async def async_set_speed(value) -> None: """Set the speed of the fan.""" try: await self._fan_channel.write_attributes({"fan_mode": value}) except ZigbeeException as ex: self.error("Could not set speed: %s", ex) return self._fan_channel.async_set_speed = async_set_speed async def async_update(self): """Attempt to retrieve on off state from the fan.""" all_states = [self.hass.states.get(x) for x in self._entity_ids] states: List[State] = list(filter(None, all_states)) on_states: List[State] = [state for state in states if state.state != SPEED_OFF] self._available = any(state.state != STATE_UNAVAILABLE for state in states) # for now just use first non off state since its kind of arbitrary if not on_states: self._state = SPEED_OFF else: self._state = states[0].state
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/zha/fan.py
"""Support for WeMo switches.""" import asyncio from datetime import datetime, timedelta import logging import async_timeout from pywemo.ouimeaux_device.api.service import ActionException from homeassistant.components.switch import SwitchEntity from homeassistant.const import STATE_OFF, STATE_ON, STATE_STANDBY, STATE_UNKNOWN from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util import convert from .const import DOMAIN as WEMO_DOMAIN SCAN_INTERVAL = timedelta(seconds=10) PARALLEL_UPDATES = 0 _LOGGER = logging.getLogger(__name__) # The WEMO_ constants below come from pywemo itself ATTR_SENSOR_STATE = "sensor_state" ATTR_SWITCH_MODE = "switch_mode" ATTR_CURRENT_STATE_DETAIL = "state_detail" ATTR_COFFEMAKER_MODE = "coffeemaker_mode" MAKER_SWITCH_MOMENTARY = "momentary" MAKER_SWITCH_TOGGLE = "toggle" WEMO_ON = 1 WEMO_OFF = 0 WEMO_STANDBY = 8 async def async_setup_entry(hass, config_entry, async_add_entities): """Set up WeMo switches.""" async def _discovered_wemo(device): """Handle a discovered Wemo device.""" async_add_entities([WemoSwitch(device)]) async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.switch", _discovered_wemo) await asyncio.gather( *[ _discovered_wemo(device) for device in hass.data[WEMO_DOMAIN]["pending"].pop("switch") ] ) class WemoSwitch(SwitchEntity): """Representation of a WeMo switch.""" def __init__(self, device): """Initialize the WeMo switch.""" self.wemo = device self.insight_params = None self.maker_params = None self.coffeemaker_mode = None self._state = None self._mode_string = None self._available = True self._update_lock = None self._model_name = self.wemo.model_name self._name = self.wemo.name self._serialnumber = self.wemo.serialnumber def _subscription_callback(self, _device, _type, _params): """Update the state by the Wemo device.""" _LOGGER.info("Subscription update for %s", self.name) updated = self.wemo.subscription_update(_type, _params) self.hass.add_job(self._async_locked_subscription_callback(not updated)) async def _async_locked_subscription_callback(self, force_update): """Handle an update from a subscription.""" # If an update is in progress, we don't do anything if self._update_lock.locked(): return await self._async_locked_update(force_update) self.async_write_ha_state() @property def unique_id(self): """Return the ID of this WeMo switch.""" return self._serialnumber @property def name(self): """Return the name of the switch if any.""" return self._name @property def device_info(self): """Return the device info.""" return { "name": self._name, "identifiers": {(WEMO_DOMAIN, self._serialnumber)}, "model": self._model_name, "manufacturer": "Belkin", } @property def device_state_attributes(self): """Return the state attributes of the device.""" attr = {} if self.maker_params: # Is the maker sensor on or off. if self.maker_params["hassensor"]: # Note a state of 1 matches the WeMo app 'not triggered'! if self.maker_params["sensorstate"]: attr[ATTR_SENSOR_STATE] = STATE_OFF else: attr[ATTR_SENSOR_STATE] = STATE_ON # Is the maker switch configured as toggle(0) or momentary (1). 
if self.maker_params["switchmode"]: attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_MOMENTARY else: attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_TOGGLE if self.insight_params or (self.coffeemaker_mode is not None): attr[ATTR_CURRENT_STATE_DETAIL] = self.detail_state if self.insight_params: attr["on_latest_time"] = WemoSwitch.as_uptime(self.insight_params["onfor"]) attr["on_today_time"] = WemoSwitch.as_uptime(self.insight_params["ontoday"]) attr["on_total_time"] = WemoSwitch.as_uptime(self.insight_params["ontotal"]) attr["power_threshold_w"] = ( convert(self.insight_params["powerthreshold"], float, 0.0) / 1000.0 ) if self.coffeemaker_mode is not None: attr[ATTR_COFFEMAKER_MODE] = self.coffeemaker_mode return attr @staticmethod def as_uptime(_seconds): """Format seconds into uptime string in the format: 00d 00h 00m 00s.""" uptime = datetime(1, 1, 1) + timedelta(seconds=_seconds) return "{:0>2d}d {:0>2d}h {:0>2d}m {:0>2d}s".format( uptime.day - 1, uptime.hour, uptime.minute, uptime.second ) @property def current_power_w(self): """Return the current power usage in W.""" if self.insight_params: return convert(self.insight_params["currentpower"], float, 0.0) / 1000.0 @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" if self.insight_params: miliwatts = convert(self.insight_params["todaymw"], float, 0.0) return round(miliwatts / (1000.0 * 1000.0 * 60), 2) @property def detail_state(self): """Return the state of the device.""" if self.coffeemaker_mode is not None: return self._mode_string if self.insight_params: standby_state = int(self.insight_params["state"]) if standby_state == WEMO_ON: return STATE_ON if standby_state == WEMO_OFF: return STATE_OFF if standby_state == WEMO_STANDBY: return STATE_STANDBY return STATE_UNKNOWN @property def is_on(self): """Return true if switch is on. Standby is on.""" return self._state @property def available(self): """Return true if switch is available.""" return self._available @property def icon(self): """Return the icon of device based on its type.""" if self._model_name == "CoffeeMaker": return "mdi:coffee" return None def turn_on(self, **kwargs): """Turn the switch on.""" try: if self.wemo.on(): self._state = WEMO_ON except ActionException as err: _LOGGER.warning("Error while turning on device %s (%s)", self.name, err) self._available = False self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the switch off.""" try: if self.wemo.off(): self._state = WEMO_OFF except ActionException as err: _LOGGER.warning("Error while turning off device %s (%s)", self.name, err) self._available = False self.schedule_update_ha_state() async def async_added_to_hass(self): """Wemo switch added to Home Assistant.""" # Define inside async context so we know our event loop self._update_lock = asyncio.Lock() registry = self.hass.data[WEMO_DOMAIN]["registry"] await self.hass.async_add_executor_job(registry.register, self.wemo) registry.on(self.wemo, None, self._subscription_callback) async def async_update(self): """Update WeMo state. Wemo has an aggressive retry logic that sometimes can take over a minute to return. If we don't get a state after 5 seconds, assume the Wemo switch is unreachable. If update goes through, it will be made available again. 
""" # If an update is in progress, we don't do anything if self._update_lock.locked(): return try: with async_timeout.timeout(5): await asyncio.shield(self._async_locked_update(True)) except asyncio.TimeoutError: _LOGGER.warning("Lost connection to %s", self.name) self._available = False async def _async_locked_update(self, force_update): """Try updating within an async lock.""" async with self._update_lock: await self.hass.async_add_executor_job(self._update, force_update) def _update(self, force_update): """Update the device state.""" try: self._state = self.wemo.get_state(force_update) if self._model_name == "Insight": self.insight_params = self.wemo.insight_params self.insight_params["standby_state"] = self.wemo.get_standby_state elif self._model_name == "Maker": self.maker_params = self.wemo.maker_params elif self._model_name == "CoffeeMaker": self.coffeemaker_mode = self.wemo.mode self._mode_string = self.wemo.mode_string if not self._available: _LOGGER.info("Reconnected to %s", self.name) self._available = True except (AttributeError, ActionException) as err: _LOGGER.warning("Could not update status for %s (%s)", self.name, err) self._available = False self.wemo.reconnect_with_device()
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/wemo/switch.py
"""Support for the Amazon Polly text to speech service.""" import logging import boto3 import voluptuous as vol from homeassistant.components.tts import PLATFORM_SCHEMA, Provider import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_REGION = "region_name" CONF_ACCESS_KEY_ID = "aws_access_key_id" CONF_SECRET_ACCESS_KEY = "aws_secret_access_key" CONF_PROFILE_NAME = "profile_name" ATTR_CREDENTIALS = "credentials" DEFAULT_REGION = "us-east-1" SUPPORTED_REGIONS = [ "us-east-1", "us-east-2", "us-west-1", "us-west-2", "ca-central-1", "eu-west-1", "eu-central-1", "eu-west-2", "eu-west-3", "ap-southeast-1", "ap-southeast-2", "ap-northeast-2", "ap-northeast-1", "ap-south-1", "sa-east-1", ] CONF_ENGINE = "engine" CONF_VOICE = "voice" CONF_OUTPUT_FORMAT = "output_format" CONF_SAMPLE_RATE = "sample_rate" CONF_TEXT_TYPE = "text_type" SUPPORTED_VOICES = [ "Zhiyu", # Chinese "Mads", "Naja", # Danish "Ruben", "Lotte", # Dutch "Russell", "Nicole", # English Australian "Brian", "Amy", "Emma", # English "Aditi", "Raveena", # English, Indian "Joey", "Justin", "Matthew", "Ivy", "Joanna", "Kendra", "Kimberly", "Salli", # English "Geraint", # English Welsh "Mathieu", "Celine", "Lea", # French "Chantal", # French Canadian "Hans", "Marlene", "Vicki", # German "Aditi", # Hindi "Karl", "Dora", # Icelandic "Giorgio", "Carla", "Bianca", # Italian "Takumi", "Mizuki", # Japanese "Seoyeon", # Korean "Liv", # Norwegian "Jacek", "Jan", "Ewa", "Maja", # Polish "Ricardo", "Vitoria", # Portuguese, Brazilian "Cristiano", "Ines", # Portuguese, European "Carmen", # Romanian "Maxim", "Tatyana", # Russian "Enrique", "Conchita", "Lucia", # Spanish European "Mia", # Spanish Mexican "Miguel", # Spanish US "Penelope", # Spanish US "Lupe", # Spanish US "Astrid", # Swedish "Filiz", # Turkish "Gwyneth", # Welsh ] SUPPORTED_OUTPUT_FORMATS = ["mp3", "ogg_vorbis", "pcm"] SUPPORTED_ENGINES = ["neural", "standard"] SUPPORTED_SAMPLE_RATES = ["8000", "16000", "22050", "24000"] SUPPORTED_SAMPLE_RATES_MAP = { "mp3": ["8000", "16000", "22050", "24000"], "ogg_vorbis": ["8000", "16000", "22050"], "pcm": ["8000", "16000"], } SUPPORTED_TEXT_TYPES = ["text", "ssml"] CONTENT_TYPE_EXTENSIONS = {"audio/mpeg": "mp3", "audio/ogg": "ogg", "audio/pcm": "pcm"} DEFAULT_ENGINE = "standard" DEFAULT_VOICE = "Joanna" DEFAULT_OUTPUT_FORMAT = "mp3" DEFAULT_TEXT_TYPE = "text" DEFAULT_SAMPLE_RATES = {"mp3": "22050", "ogg_vorbis": "22050", "pcm": "16000"} PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_REGION, default=DEFAULT_REGION): vol.In(SUPPORTED_REGIONS), vol.Inclusive(CONF_ACCESS_KEY_ID, ATTR_CREDENTIALS): cv.string, vol.Inclusive(CONF_SECRET_ACCESS_KEY, ATTR_CREDENTIALS): cv.string, vol.Exclusive(CONF_PROFILE_NAME, ATTR_CREDENTIALS): cv.string, vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORTED_VOICES), vol.Optional(CONF_ENGINE, default=DEFAULT_ENGINE): vol.In(SUPPORTED_ENGINES), vol.Optional(CONF_OUTPUT_FORMAT, default=DEFAULT_OUTPUT_FORMAT): vol.In( SUPPORTED_OUTPUT_FORMATS ), vol.Optional(CONF_SAMPLE_RATE): vol.All( cv.string, vol.In(SUPPORTED_SAMPLE_RATES) ), vol.Optional(CONF_TEXT_TYPE, default=DEFAULT_TEXT_TYPE): vol.In( SUPPORTED_TEXT_TYPES ), } ) def get_engine(hass, config, discovery_info=None): """Set up Amazon Polly speech component.""" output_format = config[CONF_OUTPUT_FORMAT] sample_rate = config.get(CONF_SAMPLE_RATE, DEFAULT_SAMPLE_RATES[output_format]) if sample_rate not in SUPPORTED_SAMPLE_RATES_MAP.get(output_format): _LOGGER.error( "%s is not a valid sample rate for %s", 
sample_rate, output_format ) return None config[CONF_SAMPLE_RATE] = sample_rate profile = config.get(CONF_PROFILE_NAME) if profile is not None: boto3.setup_default_session(profile_name=profile) aws_config = { CONF_REGION: config[CONF_REGION], CONF_ACCESS_KEY_ID: config.get(CONF_ACCESS_KEY_ID), CONF_SECRET_ACCESS_KEY: config.get(CONF_SECRET_ACCESS_KEY), } del config[CONF_REGION] del config[CONF_ACCESS_KEY_ID] del config[CONF_SECRET_ACCESS_KEY] polly_client = boto3.client("polly", **aws_config) supported_languages = [] all_voices = {} all_voices_req = polly_client.describe_voices() for voice in all_voices_req.get("Voices"): all_voices[voice.get("Id")] = voice if voice.get("LanguageCode") not in supported_languages: supported_languages.append(voice.get("LanguageCode")) return AmazonPollyProvider(polly_client, config, supported_languages, all_voices) class AmazonPollyProvider(Provider): """Amazon Polly speech api provider.""" def __init__(self, polly_client, config, supported_languages, all_voices): """Initialize Amazon Polly provider for TTS.""" self.client = polly_client self.config = config self.supported_langs = supported_languages self.all_voices = all_voices self.default_voice = self.config[CONF_VOICE] self.name = "Amazon Polly" @property def supported_languages(self): """Return a list of supported languages.""" return self.supported_langs @property def default_language(self): """Return the default language.""" return self.all_voices.get(self.default_voice).get("LanguageCode") @property def default_options(self): """Return dict include default options.""" return {CONF_VOICE: self.default_voice} @property def supported_options(self): """Return a list of supported options.""" return [CONF_VOICE] def get_tts_audio(self, message, language=None, options=None): """Request TTS file from Polly.""" voice_id = options.get(CONF_VOICE, self.default_voice) voice_in_dict = self.all_voices.get(voice_id) if language != voice_in_dict.get("LanguageCode"): _LOGGER.error("%s does not support the %s language", voice_id, language) return None, None resp = self.client.synthesize_speech( Engine=self.config[CONF_ENGINE], OutputFormat=self.config[CONF_OUTPUT_FORMAT], SampleRate=self.config[CONF_SAMPLE_RATE], Text=message, TextType=self.config[CONF_TEXT_TYPE], VoiceId=voice_id, ) return ( CONTENT_TYPE_EXTENSIONS[resp.get("ContentType")], resp.get("AudioStream").read(), )
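The provider above ultimately wraps a single boto3 synthesize_speech call. A minimal standalone sketch of that call follows; region, voice, and other values are placeholders chosen from the defaults in the schema above, whereas the integration reads them from the validated platform config.

import boto3

# Placeholder client; the integration builds it from region_name plus optional
# credentials or profile_name taken from the platform config.
polly = boto3.client("polly", region_name="us-east-1")

response = polly.synthesize_speech(
    Engine="standard",
    OutputFormat="mp3",
    SampleRate="22050",
    Text="Hello from Home Assistant",
    TextType="text",
    VoiceId="Joanna",
)

# ContentType maps to a file extension via CONTENT_TYPE_EXTENSIONS
# ("audio/mpeg" -> "mp3"); AudioStream holds the synthesized audio.
audio_bytes = response["AudioStream"].read()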
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/amazon_polly/tts.py
"""Rest API for Home Assistant.""" import asyncio import json import logging from aiohttp import web from aiohttp.web_exceptions import HTTPBadRequest import async_timeout import voluptuous as vol from homeassistant.auth.permissions.const import POLICY_READ from homeassistant.bootstrap import DATA_LOGGING from homeassistant.components.http import HomeAssistantView from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, EVENT_TIME_CHANGED, HTTP_BAD_REQUEST, HTTP_CREATED, HTTP_NOT_FOUND, HTTP_OK, MATCH_ALL, URL_API, URL_API_COMPONENTS, URL_API_CONFIG, URL_API_DISCOVERY_INFO, URL_API_ERROR_LOG, URL_API_EVENTS, URL_API_SERVICES, URL_API_STATES, URL_API_STREAM, URL_API_TEMPLATE, __version__, ) import homeassistant.core as ha from homeassistant.exceptions import ServiceNotFound, TemplateError, Unauthorized from homeassistant.helpers import template from homeassistant.helpers.json import JSONEncoder from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.service import async_get_all_descriptions from homeassistant.helpers.state import AsyncTrackStates from homeassistant.helpers.system_info import async_get_system_info _LOGGER = logging.getLogger(__name__) ATTR_BASE_URL = "base_url" ATTR_EXTERNAL_URL = "external_url" ATTR_INTERNAL_URL = "internal_url" ATTR_LOCATION_NAME = "location_name" ATTR_INSTALLATION_TYPE = "installation_type" ATTR_REQUIRES_API_PASSWORD = "requires_api_password" ATTR_UUID = "uuid" ATTR_VERSION = "version" DOMAIN = "api" STREAM_PING_PAYLOAD = "ping" STREAM_PING_INTERVAL = 50 # seconds def setup(hass, config): """Register the API with the HTTP interface.""" hass.http.register_view(APIStatusView) hass.http.register_view(APIEventStream) hass.http.register_view(APIConfigView) hass.http.register_view(APIDiscoveryView) hass.http.register_view(APIStatesView) hass.http.register_view(APIEntityStateView) hass.http.register_view(APIEventListenersView) hass.http.register_view(APIEventView) hass.http.register_view(APIServicesView) hass.http.register_view(APIDomainServicesView) hass.http.register_view(APIComponentsView) hass.http.register_view(APITemplateView) if DATA_LOGGING in hass.data: hass.http.register_view(APIErrorLog) return True class APIStatusView(HomeAssistantView): """View to handle Status requests.""" url = URL_API name = "api:status" @ha.callback def get(self, request): """Retrieve if API is running.""" return self.json_message("API running.") class APIEventStream(HomeAssistantView): """View to handle EventStream requests.""" url = URL_API_STREAM name = "api:stream" async def get(self, request): """Provide a streaming interface for the event bus.""" if not request["hass_user"].is_admin: raise Unauthorized() hass = request.app["hass"] stop_obj = object() to_write = asyncio.Queue() restrict = request.query.get("restrict") if restrict: restrict = restrict.split(",") + [EVENT_HOMEASSISTANT_STOP] async def forward_events(event): """Forward events to the open request.""" if event.event_type == EVENT_TIME_CHANGED: return if restrict and event.event_type not in restrict: return _LOGGER.debug("STREAM %s FORWARDING %s", id(stop_obj), event) if event.event_type == EVENT_HOMEASSISTANT_STOP: data = stop_obj else: data = json.dumps(event, cls=JSONEncoder) await to_write.put(data) response = web.StreamResponse() response.content_type = "text/event-stream" await response.prepare(request) unsub_stream = hass.bus.async_listen(MATCH_ALL, forward_events) try: _LOGGER.debug("STREAM %s ATTACHED", id(stop_obj)) # Fire off one message so browsers fire 
open event right away await to_write.put(STREAM_PING_PAYLOAD) while True: try: with async_timeout.timeout(STREAM_PING_INTERVAL): payload = await to_write.get() if payload is stop_obj: break msg = f"data: {payload}\n\n" _LOGGER.debug("STREAM %s WRITING %s", id(stop_obj), msg.strip()) await response.write(msg.encode("UTF-8")) except asyncio.TimeoutError: await to_write.put(STREAM_PING_PAYLOAD) except asyncio.CancelledError: _LOGGER.debug("STREAM %s ABORT", id(stop_obj)) finally: _LOGGER.debug("STREAM %s RESPONSE CLOSED", id(stop_obj)) unsub_stream() return response class APIConfigView(HomeAssistantView): """View to handle Configuration requests.""" url = URL_API_CONFIG name = "api:config" @ha.callback def get(self, request): """Get current configuration.""" return self.json(request.app["hass"].config.as_dict()) class APIDiscoveryView(HomeAssistantView): """View to provide Discovery information.""" requires_auth = False url = URL_API_DISCOVERY_INFO name = "api:discovery" async def get(self, request): """Get discovery information.""" hass = request.app["hass"] uuid = await hass.helpers.instance_id.async_get() system_info = await async_get_system_info(hass) data = { ATTR_UUID: uuid, ATTR_BASE_URL: None, ATTR_EXTERNAL_URL: None, ATTR_INTERNAL_URL: None, ATTR_LOCATION_NAME: hass.config.location_name, ATTR_INSTALLATION_TYPE: system_info[ATTR_INSTALLATION_TYPE], # always needs authentication ATTR_REQUIRES_API_PASSWORD: True, ATTR_VERSION: __version__, } try: data["external_url"] = get_url(hass, allow_internal=False) except NoURLAvailableError: pass try: data["internal_url"] = get_url(hass, allow_external=False) except NoURLAvailableError: pass # Set old base URL based on external or internal data["base_url"] = data["external_url"] or data["internal_url"] return self.json(data) class APIStatesView(HomeAssistantView): """View to handle States requests.""" url = URL_API_STATES name = "api:states" @ha.callback def get(self, request): """Get current states.""" user = request["hass_user"] entity_perm = user.permissions.check_entity states = [ state for state in request.app["hass"].states.async_all() if entity_perm(state.entity_id, "read") ] return self.json(states) class APIEntityStateView(HomeAssistantView): """View to handle EntityState requests.""" url = "/api/states/{entity_id}" name = "api:entity-state" @ha.callback def get(self, request, entity_id): """Retrieve state of entity.""" user = request["hass_user"] if not user.permissions.check_entity(entity_id, POLICY_READ): raise Unauthorized(entity_id=entity_id) state = request.app["hass"].states.get(entity_id) if state: return self.json(state) return self.json_message("Entity not found.", HTTP_NOT_FOUND) async def post(self, request, entity_id): """Update state of entity.""" if not request["hass_user"].is_admin: raise Unauthorized(entity_id=entity_id) hass = request.app["hass"] try: data = await request.json() except ValueError: return self.json_message("Invalid JSON specified.", HTTP_BAD_REQUEST) new_state = data.get("state") if new_state is None: return self.json_message("No state specified.", HTTP_BAD_REQUEST) attributes = data.get("attributes") force_update = data.get("force_update", False) is_new_state = hass.states.get(entity_id) is None # Write state hass.states.async_set( entity_id, new_state, attributes, force_update, self.context(request) ) # Read the state back for our response status_code = HTTP_CREATED if is_new_state else HTTP_OK resp = self.json(hass.states.get(entity_id), status_code) resp.headers.add("Location", 
f"/api/states/{entity_id}") return resp @ha.callback def delete(self, request, entity_id): """Remove entity.""" if not request["hass_user"].is_admin: raise Unauthorized(entity_id=entity_id) if request.app["hass"].states.async_remove(entity_id): return self.json_message("Entity removed.") return self.json_message("Entity not found.", HTTP_NOT_FOUND) class APIEventListenersView(HomeAssistantView): """View to handle EventListeners requests.""" url = URL_API_EVENTS name = "api:event-listeners" @ha.callback def get(self, request): """Get event listeners.""" return self.json(async_events_json(request.app["hass"])) class APIEventView(HomeAssistantView): """View to handle Event requests.""" url = "/api/events/{event_type}" name = "api:event" async def post(self, request, event_type): """Fire events.""" if not request["hass_user"].is_admin: raise Unauthorized() body = await request.text() try: event_data = json.loads(body) if body else None except ValueError: return self.json_message( "Event data should be valid JSON.", HTTP_BAD_REQUEST ) if event_data is not None and not isinstance(event_data, dict): return self.json_message( "Event data should be a JSON object", HTTP_BAD_REQUEST ) # Special case handling for event STATE_CHANGED # We will try to convert state dicts back to State objects if event_type == ha.EVENT_STATE_CHANGED and event_data: for key in ("old_state", "new_state"): state = ha.State.from_dict(event_data.get(key)) if state: event_data[key] = state request.app["hass"].bus.async_fire( event_type, event_data, ha.EventOrigin.remote, self.context(request) ) return self.json_message(f"Event {event_type} fired.") class APIServicesView(HomeAssistantView): """View to handle Services requests.""" url = URL_API_SERVICES name = "api:services" async def get(self, request): """Get registered services.""" services = await async_services_json(request.app["hass"]) return self.json(services) class APIDomainServicesView(HomeAssistantView): """View to handle DomainServices requests.""" url = "/api/services/{domain}/{service}" name = "api:domain-services" async def post(self, request, domain, service): """Call a service. Returns a list of changed states. 
""" hass = request.app["hass"] body = await request.text() try: data = json.loads(body) if body else None except ValueError: return self.json_message("Data should be valid JSON.", HTTP_BAD_REQUEST) with AsyncTrackStates(hass) as changed_states: try: await hass.services.async_call( domain, service, data, True, self.context(request) ) except (vol.Invalid, ServiceNotFound) as ex: raise HTTPBadRequest() from ex return self.json(changed_states) class APIComponentsView(HomeAssistantView): """View to handle Components requests.""" url = URL_API_COMPONENTS name = "api:components" @ha.callback def get(self, request): """Get current loaded components.""" return self.json(request.app["hass"].config.components) class APITemplateView(HomeAssistantView): """View to handle Template requests.""" url = URL_API_TEMPLATE name = "api:template" async def post(self, request): """Render a template.""" if not request["hass_user"].is_admin: raise Unauthorized() try: data = await request.json() tpl = template.Template(data["template"], request.app["hass"]) return tpl.async_render(variables=data.get("variables"), parse_result=False) except (ValueError, TemplateError) as ex: return self.json_message( f"Error rendering template: {ex}", HTTP_BAD_REQUEST ) class APIErrorLog(HomeAssistantView): """View to fetch the API error log.""" url = URL_API_ERROR_LOG name = "api:error_log" async def get(self, request): """Retrieve API error log.""" if not request["hass_user"].is_admin: raise Unauthorized() return web.FileResponse(request.app["hass"].data[DATA_LOGGING]) async def async_services_json(hass): """Generate services data to JSONify.""" descriptions = await async_get_all_descriptions(hass) return [{"domain": key, "services": value} for key, value in descriptions.items()] @ha.callback def async_events_json(hass): """Generate event data to JSONify.""" return [ {"event": key, "listener_count": value} for key, value in hass.bus.async_listeners().items() ]
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/api/__init__.py
"""Support for non-delivered packages recorded in AfterShip.""" from datetime import timedelta import logging from pyaftership.tracker import Tracking import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME, HTTP_OK from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Information provided by AfterShip" ATTR_TRACKINGS = "trackings" BASE = "https://track.aftership.com/" CONF_SLUG = "slug" CONF_TITLE = "title" CONF_TRACKING_NUMBER = "tracking_number" DEFAULT_NAME = "aftership" UPDATE_TOPIC = f"{DOMAIN}_update" ICON = "mdi:package-variant-closed" MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) SERVICE_ADD_TRACKING = "add_tracking" SERVICE_REMOVE_TRACKING = "remove_tracking" ADD_TRACKING_SERVICE_SCHEMA = vol.Schema( { vol.Required(CONF_TRACKING_NUMBER): cv.string, vol.Optional(CONF_TITLE): cv.string, vol.Optional(CONF_SLUG): cv.string, } ) REMOVE_TRACKING_SERVICE_SCHEMA = vol.Schema( {vol.Required(CONF_SLUG): cv.string, vol.Required(CONF_TRACKING_NUMBER): cv.string} ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the AfterShip sensor platform.""" apikey = config[CONF_API_KEY] name = config[CONF_NAME] session = async_get_clientsession(hass) aftership = Tracking(hass.loop, session, apikey) await aftership.get_trackings() if not aftership.meta or aftership.meta["code"] != HTTP_OK: _LOGGER.error( "No tracking data found. 
Check API key is correct: %s", aftership.meta ) return instance = AfterShipSensor(aftership, name) async_add_entities([instance], True) async def handle_add_tracking(call): """Call when a user adds a new Aftership tracking from Home Assistant.""" title = call.data.get(CONF_TITLE) slug = call.data.get(CONF_SLUG) tracking_number = call.data[CONF_TRACKING_NUMBER] await aftership.add_package_tracking(tracking_number, title, slug) async_dispatcher_send(hass, UPDATE_TOPIC) hass.services.async_register( DOMAIN, SERVICE_ADD_TRACKING, handle_add_tracking, schema=ADD_TRACKING_SERVICE_SCHEMA, ) async def handle_remove_tracking(call): """Call when a user removes an Aftership tracking from Home Assistant.""" slug = call.data[CONF_SLUG] tracking_number = call.data[CONF_TRACKING_NUMBER] await aftership.remove_package_tracking(slug, tracking_number) async_dispatcher_send(hass, UPDATE_TOPIC) hass.services.async_register( DOMAIN, SERVICE_REMOVE_TRACKING, handle_remove_tracking, schema=REMOVE_TRACKING_SERVICE_SCHEMA, ) class AfterShipSensor(Entity): """Representation of a AfterShip sensor.""" def __init__(self, aftership, name): """Initialize the sensor.""" self._attributes = {} self._name = name self._state = None self.aftership = aftership @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return "packages" @property def device_state_attributes(self): """Return attributes for the sensor.""" return self._attributes @property def icon(self): """Icon to use in the frontend.""" return ICON async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( self.hass.helpers.dispatcher.async_dispatcher_connect( UPDATE_TOPIC, self._force_update ) ) async def _force_update(self): """Force update of data.""" await self.async_update(no_throttle=True) self.async_write_ha_state() @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self, **kwargs): """Get the latest data from the AfterShip API.""" await self.aftership.get_trackings() if not self.aftership.meta: _LOGGER.error("Unknown errors when querying") return if self.aftership.meta["code"] != HTTP_OK: _LOGGER.error( "Errors when querying AfterShip. %s", str(self.aftership.meta) ) return status_to_ignore = {"delivered"} status_counts = {} trackings = [] not_delivered_count = 0 for track in self.aftership.trackings["trackings"]: status = track["tag"].lower() name = ( track["tracking_number"] if track["title"] is None else track["title"] ) last_checkpoint = ( "Shipment pending" if track["tag"] == "Pending" else track["checkpoints"][-1] ) status_counts[status] = status_counts.get(status, 0) + 1 trackings.append( { "name": name, "tracking_number": track["tracking_number"], "slug": track["slug"], "link": f"{BASE}{track['slug']}/{track['tracking_number']}", "last_update": track["updated_at"], "expected_delivery": track["expected_delivery"], "status": track["tag"], "last_checkpoint": last_checkpoint, } ) if status not in status_to_ignore: not_delivered_count += 1 else: _LOGGER.debug("Ignoring %s as it has status: %s", name, status) self._attributes = { ATTR_ATTRIBUTION: ATTRIBUTION, **status_counts, ATTR_TRACKINGS: trackings, } self._state = not_delivered_count
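The AfterShip platform above registers its add_tracking service with a voluptuous schema. As a minimal, standalone sketch (the payload values are invented for illustration), this is how that schema accepts or rejects a service call's data before the handler runs:

import voluptuous as vol
import homeassistant.helpers.config_validation as cv

CONF_SLUG = "slug"
CONF_TITLE = "title"
CONF_TRACKING_NUMBER = "tracking_number"

ADD_TRACKING_SERVICE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_TRACKING_NUMBER): cv.string,
        vol.Optional(CONF_TITLE): cv.string,
        vol.Optional(CONF_SLUG): cv.string,
    }
)

# A valid payload passes through unchanged.
print(ADD_TRACKING_SERVICE_SCHEMA({"tracking_number": "123456789", "slug": "usps"}))

# Omitting the required tracking_number raises vol.MultipleInvalid,
# so the service call is rejected before the handler is invoked.
try:
    ADD_TRACKING_SERVICE_SCHEMA({"title": "New shoes"})
except vol.MultipleInvalid as err:
    print(err)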
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/aftership/sensor.py
"""Config flow to configure the GDACS integration.""" import logging import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_SCAN_INTERVAL, ) from homeassistant.helpers import config_validation as cv from .const import ( # pylint: disable=unused-import CONF_CATEGORIES, DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN, ) DATA_SCHEMA = vol.Schema( {vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): cv.positive_int} ) _LOGGER = logging.getLogger(__name__) class GdacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a GDACS config flow.""" CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors or {} ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" _LOGGER.debug("User input: %s", user_input) if not user_input: return await self._show_form() latitude = user_input.get(CONF_LATITUDE, self.hass.config.latitude) user_input[CONF_LATITUDE] = latitude longitude = user_input.get(CONF_LONGITUDE, self.hass.config.longitude) user_input[CONF_LONGITUDE] = longitude identifier = f"{user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}" await self.async_set_unique_id(identifier) self._abort_if_unique_id_configured() scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds categories = user_input.get(CONF_CATEGORIES, []) user_input[CONF_CATEGORIES] = categories return self.async_create_entry(title=identifier, data=user_input)
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/gdacs/config_flow.py
"""Support for Lutron Homeworks lights.""" import logging from pyhomeworks.pyhomeworks import HW_LIGHT_CHANGED from homeassistant.components.light import ( ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, LightEntity, ) from homeassistant.const import CONF_NAME from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import CONF_ADDR, CONF_DIMMERS, CONF_RATE, HOMEWORKS_CONTROLLER, HomeworksDevice _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discover_info=None): """Set up Homeworks lights.""" if discover_info is None: return controller = hass.data[HOMEWORKS_CONTROLLER] devs = [] for dimmer in discover_info[CONF_DIMMERS]: dev = HomeworksLight( controller, dimmer[CONF_ADDR], dimmer[CONF_NAME], dimmer[CONF_RATE] ) devs.append(dev) add_entities(devs, True) class HomeworksLight(HomeworksDevice, LightEntity): """Homeworks Light.""" def __init__(self, controller, addr, name, rate): """Create device with Addr, name, and rate.""" super().__init__(controller, addr, name) self._rate = rate self._level = 0 self._prev_level = 0 async def async_added_to_hass(self): """Call when entity is added to hass.""" signal = f"homeworks_entity_{self._addr}" _LOGGER.debug("connecting %s", signal) self.async_on_remove( async_dispatcher_connect(self.hass, signal, self._update_callback) ) self._controller.request_dimmer_level(self._addr) @property def supported_features(self): """Supported features.""" return SUPPORT_BRIGHTNESS def turn_on(self, **kwargs): """Turn on the light.""" if ATTR_BRIGHTNESS in kwargs: new_level = kwargs[ATTR_BRIGHTNESS] elif self._prev_level == 0: new_level = 255 else: new_level = self._prev_level self._set_brightness(new_level) def turn_off(self, **kwargs): """Turn off the light.""" self._set_brightness(0) @property def brightness(self): """Control the brightness.""" return self._level def _set_brightness(self, level): """Send the brightness level to the device.""" self._controller.fade_dim( float((level * 100.0) / 255.0), self._rate, 0, self._addr ) @property def device_state_attributes(self): """Supported attributes.""" return {"homeworks_address": self._addr} @property def is_on(self): """Is the light on/off.""" return self._level != 0 @callback def _update_callback(self, msg_type, values): """Process device specific messages.""" if msg_type == HW_LIGHT_CHANGED: self._level = int((values[1] * 255.0) / 100.0) if self._level != 0: self._prev_level = self._level self.async_write_ha_state()
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/homeworks/light.py
"""Support for Plaato Airlock sensors.""" import logging from homeassistant.const import PERCENTAGE from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from . import ( ATTR_ABV, ATTR_BATCH_VOLUME, ATTR_BPM, ATTR_CO2_VOLUME, ATTR_TEMP, ATTR_TEMP_UNIT, ATTR_VOLUME_UNIT, DOMAIN as PLAATO_DOMAIN, PLAATO_DEVICE_ATTRS, PLAATO_DEVICE_SENSORS, SENSOR_DATA_KEY, SENSOR_UPDATE, ) _LOGGER = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Plaato sensor.""" async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Plaato from a config entry.""" devices = {} def get_device(device_id): """Get a device.""" return hass.data[PLAATO_DOMAIN].get(device_id, False) def get_device_sensors(device_id): """Get device sensors.""" return hass.data[PLAATO_DOMAIN].get(device_id).get(PLAATO_DEVICE_SENSORS) async def _update_sensor(device_id): """Update/Create the sensors.""" if device_id not in devices and get_device(device_id): entities = [] sensors = get_device_sensors(device_id) for sensor_type in sensors: entities.append(PlaatoSensor(device_id, sensor_type)) devices[device_id] = entities async_add_entities(entities, True) else: for entity in devices[device_id]: async_dispatcher_send(hass, f"{PLAATO_DOMAIN}_{entity.unique_id}") hass.data[SENSOR_DATA_KEY] = async_dispatcher_connect( hass, SENSOR_UPDATE, _update_sensor ) return True class PlaatoSensor(Entity): """Representation of a Sensor.""" def __init__(self, device_id, sensor_type): """Initialize the sensor.""" self._device_id = device_id self._type = sensor_type self._state = 0 self._name = f"{device_id} {sensor_type}" self._attributes = None @property def name(self): """Return the name of the sensor.""" return f"{PLAATO_DOMAIN} {self._name}" @property def unique_id(self): """Return the unique ID of this sensor.""" return f"{self._device_id}_{self._type}" @property def device_info(self): """Get device info.""" return { "identifiers": {(PLAATO_DOMAIN, self._device_id)}, "name": self._device_id, "manufacturer": "Plaato", "model": "Airlock", } def get_sensors(self): """Get device sensors.""" return ( self.hass.data[PLAATO_DOMAIN] .get(self._device_id) .get(PLAATO_DEVICE_SENSORS, False) ) def get_sensors_unit_of_measurement(self, sensor_type): """Get unit of measurement for sensor of type.""" return ( self.hass.data[PLAATO_DOMAIN] .get(self._device_id) .get(PLAATO_DEVICE_ATTRS, []) .get(sensor_type, "") ) @property def state(self): """Return the state of the sensor.""" sensors = self.get_sensors() if sensors is False: _LOGGER.debug("Device with name %s has no sensors", self.name) return 0 if self._type == ATTR_ABV: return round(sensors.get(self._type), 2) if self._type == ATTR_TEMP: return round(sensors.get(self._type), 1) if self._type == ATTR_CO2_VOLUME: return round(sensors.get(self._type), 2) return sensors.get(self._type) @property def device_state_attributes(self): """Return the state attributes of the monitored installation.""" if self._attributes is not None: return self._attributes @property def unit_of_measurement(self): """Return the unit of measurement.""" if self._type == ATTR_TEMP: return self.get_sensors_unit_of_measurement(ATTR_TEMP_UNIT) if self._type == ATTR_BATCH_VOLUME or self._type == ATTR_CO2_VOLUME: return self.get_sensors_unit_of_measurement(ATTR_VOLUME_UNIT) if self._type == ATTR_BPM: return "bpm" if self._type == ATTR_ABV: return PERCENTAGE return "" 
@property def should_poll(self): """Return the polling state.""" return False async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( self.hass.helpers.dispatcher.async_dispatcher_connect( f"{PLAATO_DOMAIN}_{self.unique_id}", self.async_write_ha_state ) )
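The Plaato sensor above rounds a few readings before exposing them as state. A minimal sketch of that per-type rounding (the ATTR_* string values and the raw readings here are invented for illustration; the real constants come from the plaato integration):

ATTR_ABV = "abv"
ATTR_TEMP = "temp"
ATTR_CO2_VOLUME = "co2_volume"


def plaato_state(sensor_type: str, raw: float):
    """Mirror PlaatoSensor.state: round ABV/CO2 to 2 decimals, temperature to 1."""
    if sensor_type == ATTR_ABV:
        return round(raw, 2)
    if sensor_type == ATTR_TEMP:
        return round(raw, 1)
    if sensor_type == ATTR_CO2_VOLUME:
        return round(raw, 2)
    return raw


assert plaato_state(ATTR_ABV, 4.5678) == 4.57
assert plaato_state(ATTR_TEMP, 21.34) == 21.3
assert plaato_state("bpm", 12) == 12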
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
repo_name: soldag/home-assistant
test_path: tests/components/mqtt/test_switch.py
code_path: homeassistant/components/plaato/sensor.py
"""Constants used by the Withings component.""" from enum import Enum import homeassistant.const as const CONF_PROFILES = "profiles" CONF_USE_WEBHOOK = "use_webhook" DATA_MANAGER = "data_manager" CONFIG = "config" DOMAIN = "withings" LOG_NAMESPACE = "homeassistant.components.withings" PROFILE = "profile" PUSH_HANDLER = "push_handler" CONF_WEBHOOK_URL = "webhook_url" class Measurement(Enum): """Measurement supported by the withings integration.""" BODY_TEMP_C = "body_temperature_c" BONE_MASS_KG = "bone_mass_kg" DIASTOLIC_MMHG = "diastolic_blood_pressure_mmhg" FAT_FREE_MASS_KG = "fat_free_mass_kg" FAT_MASS_KG = "fat_mass_kg" FAT_RATIO_PCT = "fat_ratio_pct" HEART_PULSE_BPM = "heart_pulse_bpm" HEIGHT_M = "height_m" HYDRATION = "hydration" IN_BED = "in_bed" MUSCLE_MASS_KG = "muscle_mass_kg" PWV = "pulse_wave_velocity" SKIN_TEMP_C = "skin_temperature_c" SLEEP_BREATHING_DISTURBANCES_INTENSITY = "sleep_breathing_disturbances_intensity" SLEEP_DEEP_DURATION_SECONDS = "sleep_deep_duration_seconds" SLEEP_HEART_RATE_AVERAGE = "sleep_heart_rate_average_bpm" SLEEP_HEART_RATE_MAX = "sleep_heart_rate_max_bpm" SLEEP_HEART_RATE_MIN = "sleep_heart_rate_min_bpm" SLEEP_LIGHT_DURATION_SECONDS = "sleep_light_duration_seconds" SLEEP_REM_DURATION_SECONDS = "sleep_rem_duration_seconds" SLEEP_RESPIRATORY_RATE_AVERAGE = "sleep_respiratory_average_bpm" SLEEP_RESPIRATORY_RATE_MAX = "sleep_respiratory_max_bpm" SLEEP_RESPIRATORY_RATE_MIN = "sleep_respiratory_min_bpm" SLEEP_SCORE = "sleep_score" SLEEP_SNORING = "sleep_snoring" SLEEP_SNORING_EPISODE_COUNT = "sleep_snoring_eposode_count" SLEEP_TOSLEEP_DURATION_SECONDS = "sleep_tosleep_duration_seconds" SLEEP_TOWAKEUP_DURATION_SECONDS = "sleep_towakeup_duration_seconds" SLEEP_WAKEUP_COUNT = "sleep_wakeup_count" SLEEP_WAKEUP_DURATION_SECONDS = "sleep_wakeup_duration_seconds" SPO2_PCT = "spo2_pct" SYSTOLIC_MMGH = "systolic_blood_pressure_mmhg" TEMP_C = "temperature_c" WEIGHT_KG = "weight_kg" UOM_BEATS_PER_MINUTE = "bpm" UOM_BREATHS_PER_MINUTE = f"br/{const.TIME_MINUTES}" UOM_FREQUENCY = "times" UOM_MMHG = "mmhg" UOM_LENGTH_M = const.LENGTH_METERS UOM_TEMP_C = const.TEMP_CELSIUS
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
repo_name: soldag/home-assistant
test_path: tests/components/mqtt/test_switch.py
code_path: homeassistant/components/withings/const.py
"""Support the ISY-994 controllers.""" import asyncio from functools import partial from typing import Optional from urllib.parse import urlparse from pyisy import ISY import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv import homeassistant.helpers.device_registry as dr from homeassistant.helpers.typing import ConfigType from .const import ( _LOGGER, CONF_IGNORE_STRING, CONF_RESTORE_LIGHT_STATE, CONF_SENSOR_STRING, CONF_TLS_VER, CONF_VAR_SENSOR_STRING, DEFAULT_IGNORE_STRING, DEFAULT_RESTORE_LIGHT_STATE, DEFAULT_SENSOR_STRING, DEFAULT_VAR_SENSOR_STRING, DOMAIN, ISY994_ISY, ISY994_NODES, ISY994_PROGRAMS, ISY994_VARIABLES, MANUFACTURER, SUPPORTED_PLATFORMS, SUPPORTED_PROGRAM_PLATFORMS, UNDO_UPDATE_LISTENER, ) from .helpers import _categorize_nodes, _categorize_programs, _categorize_variables from .services import async_setup_services, async_unload_services CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_HOST): cv.url, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_TLS_VER): vol.Coerce(float), vol.Optional( CONF_IGNORE_STRING, default=DEFAULT_IGNORE_STRING ): cv.string, vol.Optional( CONF_SENSOR_STRING, default=DEFAULT_SENSOR_STRING ): cv.string, vol.Optional( CONF_VAR_SENSOR_STRING, default=DEFAULT_VAR_SENSOR_STRING ): cv.string, vol.Required( CONF_RESTORE_LIGHT_STATE, default=DEFAULT_RESTORE_LIGHT_STATE ): bool, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the isy994 integration from YAML.""" isy_config: Optional[ConfigType] = config.get(DOMAIN) hass.data.setdefault(DOMAIN, {}) if not isy_config: return True # Only import if we haven't before. config_entry = _async_find_matching_config_entry(hass) if not config_entry: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=dict(isy_config), ) ) return True # Update the entry based on the YAML configuration, in case it changed. 
hass.config_entries.async_update_entry(config_entry, data=dict(isy_config)) return True @callback def _async_find_matching_config_entry(hass): for entry in hass.config_entries.async_entries(DOMAIN): if entry.source == config_entries.SOURCE_IMPORT: return entry async def async_setup_entry( hass: HomeAssistant, entry: config_entries.ConfigEntry ) -> bool: """Set up the ISY 994 integration.""" # As there currently is no way to import options from yaml # when setting up a config entry, we fallback to adding # the options to the config entry and pull them out here if # they are missing from the options _async_import_options_from_data_if_missing(hass, entry) hass.data[DOMAIN][entry.entry_id] = {} hass_isy_data = hass.data[DOMAIN][entry.entry_id] hass_isy_data[ISY994_NODES] = {} for platform in SUPPORTED_PLATFORMS: hass_isy_data[ISY994_NODES][platform] = [] hass_isy_data[ISY994_PROGRAMS] = {} for platform in SUPPORTED_PROGRAM_PLATFORMS: hass_isy_data[ISY994_PROGRAMS][platform] = [] hass_isy_data[ISY994_VARIABLES] = [] isy_config = entry.data isy_options = entry.options # Required user = isy_config[CONF_USERNAME] password = isy_config[CONF_PASSWORD] host = urlparse(isy_config[CONF_HOST]) # Optional tls_version = isy_config.get(CONF_TLS_VER) ignore_identifier = isy_options.get(CONF_IGNORE_STRING, DEFAULT_IGNORE_STRING) sensor_identifier = isy_options.get(CONF_SENSOR_STRING, DEFAULT_SENSOR_STRING) variable_identifier = isy_options.get( CONF_VAR_SENSOR_STRING, DEFAULT_VAR_SENSOR_STRING ) if host.scheme == "http": https = False port = host.port or 80 elif host.scheme == "https": https = True port = host.port or 443 else: _LOGGER.error("isy994 host value in configuration is invalid") return False # Connect to ISY controller. isy = await hass.async_add_executor_job( partial( ISY, host.hostname, port, username=user, password=password, use_https=https, tls_ver=tls_version, webroot=host.path, ) ) if not isy.connected: return False # Trigger a status update for all nodes, not done automatically in PyISY v2.x await hass.async_add_executor_job(isy.nodes.update) _categorize_nodes(hass_isy_data, isy.nodes, ignore_identifier, sensor_identifier) _categorize_programs(hass_isy_data, isy.programs) _categorize_variables(hass_isy_data, isy.variables, variable_identifier) # Dump ISY Clock Information. Future: Add ISY as sensor to Hass with attrs _LOGGER.info(repr(isy.clock)) hass_isy_data[ISY994_ISY] = isy await _async_get_or_create_isy_device_in_registry(hass, entry, isy) # Load platforms for the devices in the ISY controller that we support. 
for platform in SUPPORTED_PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) def _start_auto_update() -> None: """Start isy auto update.""" _LOGGER.debug("ISY Starting Event Stream and automatic updates") isy.auto_update = True await hass.async_add_executor_job(_start_auto_update) undo_listener = entry.add_update_listener(_async_update_listener) hass_isy_data[UNDO_UPDATE_LISTENER] = undo_listener # Register Integration-wide Services: async_setup_services(hass) return True async def _async_update_listener( hass: HomeAssistant, entry: config_entries.ConfigEntry ): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) @callback def _async_import_options_from_data_if_missing( hass: HomeAssistant, entry: config_entries.ConfigEntry ): options = dict(entry.options) modified = False for importable_option in [ CONF_IGNORE_STRING, CONF_SENSOR_STRING, CONF_RESTORE_LIGHT_STATE, ]: if importable_option not in entry.options and importable_option in entry.data: options[importable_option] = entry.data[importable_option] modified = True if modified: hass.config_entries.async_update_entry(entry, options=options) async def _async_get_or_create_isy_device_in_registry( hass: HomeAssistant, entry: config_entries.ConfigEntry, isy ) -> None: device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, isy.configuration["uuid"])}, identifiers={(DOMAIN, isy.configuration["uuid"])}, manufacturer=MANUFACTURER, name=isy.configuration["name"], model=isy.configuration["model"], sw_version=isy.configuration["firmware"], ) async def async_unload_entry( hass: HomeAssistant, entry: config_entries.ConfigEntry ) -> bool: """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in SUPPORTED_PLATFORMS ] ) ) hass_isy_data = hass.data[DOMAIN][entry.entry_id] isy = hass_isy_data[ISY994_ISY] def _stop_auto_update() -> None: """Start isy auto update.""" _LOGGER.debug("ISY Stopping Event Stream and automatic updates") isy.auto_update = False await hass.async_add_executor_job(_stop_auto_update) hass_isy_data[UNDO_UPDATE_LISTENER]() if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) async_unload_services(hass) return unload_ok
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
repo_name: soldag/home-assistant
test_path: tests/components/mqtt/test_switch.py
code_path: homeassistant/components/isy994/__init__.py
"""Describe group states.""" from homeassistant.components.group import GroupIntegrationRegistry from homeassistant.core import callback from homeassistant.helpers.typing import HomeAssistantType @callback def async_describe_on_off_states( hass: HomeAssistantType, registry: GroupIntegrationRegistry ) -> None: """Describe group on off states.""" registry.exclude_domain()
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
repo_name: soldag/home-assistant
test_path: tests/components/mqtt/test_switch.py
code_path: homeassistant/components/air_quality/group.py
"""Config flow for Smart Meter Texas integration.""" import asyncio import logging from aiohttp import ClientError from smart_meter_texas import Account, Client from smart_meter_texas.exceptions import ( SmartMeterTexasAPIError, SmartMeterTexasAuthError, ) import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import aiohttp_client from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ client_session = aiohttp_client.async_get_clientsession(hass) account = Account(data["username"], data["password"]) client = Client(client_session, account) try: await client.authenticate() except (asyncio.TimeoutError, ClientError, SmartMeterTexasAPIError) as error: raise CannotConnect from error except SmartMeterTexasAuthError as error: raise InvalidAuth(error) from error # Return info that you want to store in the config entry. return {"title": account.username} class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Smart Meter Texas.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: if not errors: # Ensure the same account cannot be setup more than once. await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect.""" class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/smart_meter_texas/config_flow.py
"""Support for RFXtrx switches.""" import logging import RFXtrx as rfxtrxmod from homeassistant.components.switch import SwitchEntity from homeassistant.const import CONF_DEVICES, STATE_ON from homeassistant.core import callback from . import ( CONF_DATA_BITS, CONF_SIGNAL_REPETITIONS, DEFAULT_SIGNAL_REPETITIONS, DOMAIN, RfxtrxCommandEntity, connect_auto_add, get_device_id, get_rfx_object, ) from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST DATA_SWITCH = f"{DOMAIN}_switch" _LOGGER = logging.getLogger(__name__) def supported(event): """Return whether an event supports switch.""" return ( isinstance(event.device, rfxtrxmod.LightingDevice) and not event.device.known_to_be_dimmable and not event.device.known_to_be_rollershutter or isinstance(event.device, rfxtrxmod.RfyDevice) ) async def async_setup_entry( hass, config_entry, async_add_entities, ): """Set up config entry.""" discovery_info = config_entry.data device_ids = set() # Add switch from config file entities = [] for packet_id, entity_info in discovery_info[CONF_DEVICES].items(): event = get_rfx_object(packet_id) if event is None: _LOGGER.error("Invalid device: %s", packet_id) continue if not supported(event): continue device_id = get_device_id( event.device, data_bits=entity_info.get(CONF_DATA_BITS) ) if device_id in device_ids: continue device_ids.add(device_id) entity = RfxtrxSwitch( event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS] ) entities.append(entity) async_add_entities(entities) @callback def switch_update(event, device_id): """Handle sensor updates from the RFXtrx gateway.""" if not supported(event): return if device_id in device_ids: return device_ids.add(device_id) _LOGGER.info( "Added switch (Device ID: %s Class: %s Sub: %s, Event: %s)", event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, "".join(f"{x:02x}" for x in event.data), ) entity = RfxtrxSwitch( event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event ) async_add_entities([entity]) # Subscribe to main RFXtrx events connect_auto_add(hass, discovery_info, switch_update) class RfxtrxSwitch(RfxtrxCommandEntity, SwitchEntity): """Representation of a RFXtrx switch.""" async def async_added_to_hass(self): """Restore device state.""" await super().async_added_to_hass() if self._event is None: old_state = await self.async_get_last_state() if old_state is not None: self._state = old_state.state == STATE_ON def _apply_event(self, event): """Apply command from rfxtrx.""" super()._apply_event(event) if event.values["Command"] in COMMAND_ON_LIST: self._state = True elif event.values["Command"] in COMMAND_OFF_LIST: self._state = False @callback def _handle_event(self, event, device_id): """Check if event applies to me and update.""" if device_id != self._device_id: return self._apply_event(event) self.async_write_ha_state() @property def is_on(self): """Return true if device is on.""" return self._state async def async_turn_on(self, **kwargs): """Turn the device on.""" await self._async_send(self._device.send_on) self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the device off.""" await self._async_send(self._device.send_off) self._state = False self.async_write_ha_state()
"""The tests for the MQTT switch platform.""" import copy import json import pytest from homeassistant.components import switch from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.async_mock import patch from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "1") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "0") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ): assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", "qos": "2", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": "beer on", "payload_off": "beer off", 
"value_template": "{{ value_json.val }}", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}') state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}') state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_default_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" config = { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, } } await help_test_custom_availability_payload( hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1" ) async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, switch.DOMAIN, { switch.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "state-topic", "command_topic": "command-topic", "payload_on": 1, "payload_off": 0, "state_on": "HIGH", "state_off": "LOW", } }, ) await hass.async_block_till_done() state = hass.states.get("switch.test") assert state.state == STATE_OFF assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "state-topic", "HIGH") state = hass.states.get("switch.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "state-topic", "LOW") state = hass.states.get("switch.test") assert state.state == STATE_OFF async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, switch.DOMAIN, 
DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "command-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): """Test removal of discovered switch.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state2" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_switch_template(hass, mqtt_mock, caplog): """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN]) config1["name"] = "Beer" config2["name"] = "Milk" config1["state_topic"] = "switch/state1" config2["state_topic"] = "switch/state1" config1["value_template"] = "{{ value_json.state1.state }}" config2["value_template"] = "{{ value_json.state2.state }}" state_data1 = [ ([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None), ] state_data2 = [ ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None), ([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None), ([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None), ] data1 = json.dumps(config1) data2 = json.dumps(config2) await help_test_discovery_update( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog): """Test update of discovered switch.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.switch.MqttSwitch.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" 
data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT switch device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG )
soldag/home-assistant
tests/components/mqtt/test_switch.py
homeassistant/components/rfxtrx/switch.py
""" SendGrid notification service. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/notify.sendgrid/ """ import logging import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService) from homeassistant.const import ( CONF_API_KEY, CONF_SENDER, CONF_RECIPIENT, CONTENT_TYPE_TEXT_PLAIN) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['sendgrid==5.6.0'] _LOGGER = logging.getLogger(__name__) # pylint: disable=no-value-for-parameter PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_SENDER): vol.Email(), vol.Required(CONF_RECIPIENT): vol.Email(), }) def get_service(hass, config, discovery_info=None): """Get the SendGrid notification service.""" api_key = config.get(CONF_API_KEY) sender = config.get(CONF_SENDER) recipient = config.get(CONF_RECIPIENT) return SendgridNotificationService(api_key, sender, recipient) class SendgridNotificationService(BaseNotificationService): """Implementation the notification service for email via Sendgrid.""" def __init__(self, api_key, sender, recipient): """Initialize the service.""" from sendgrid import SendGridAPIClient self.api_key = api_key self.sender = sender self.recipient = recipient self._sg = SendGridAPIClient(apikey=self.api_key) def send_message(self, message='', **kwargs): """Send an email to a user via SendGrid.""" subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) data = { "personalizations": [ { "to": [ { "email": self.recipient } ], "subject": subject } ], "from": { "email": self.sender }, "content": [ { "type": CONTENT_TYPE_TEXT_PLAIN, "value": message } ] } response = self._sg.client.mail.send.post(request_body=data) if response.status_code != 202: _LOGGER.error("Unable to send notification")
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/notify/sendgrid.py
""" Support for links to external web pages. For more details about this component, please refer to the documentation at https://home-assistant.io/components/weblink/ """ import logging import voluptuous as vol from homeassistant.const import (CONF_NAME, CONF_ICON, CONF_URL) from homeassistant.helpers.entity import Entity from homeassistant.util import slugify import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_ENTITIES = 'entities' CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required." CONF_RELATIVE_URL_REGEX = r'\A/' DOMAIN = 'weblink' ENTITIES_SCHEMA = vol.Schema({ # pylint: disable=no-value-for-parameter vol.Required(CONF_URL): vol.Any( vol.Match(CONF_RELATIVE_URL_REGEX, msg=CONF_RELATIVE_URL_ERROR_MSG), vol.Url()), vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ICON): cv.icon, }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_ENTITIES): [ENTITIES_SCHEMA], }), }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the weblink component.""" links = config.get(DOMAIN) for link in links.get(CONF_ENTITIES): Link(hass, link.get(CONF_NAME), link.get(CONF_URL), link.get(CONF_ICON)) return True class Link(Entity): """Representation of a link.""" def __init__(self, hass, name, url, icon): """Initialize the link.""" self.hass = hass self._name = name self._url = url self._icon = icon self.entity_id = DOMAIN + '.%s' % slugify(name) self.schedule_update_ha_state() @property def icon(self): """Return the icon to use in the frontend, if any.""" return self._icon @property def name(self): """Return the name of the URL.""" return self._name @property def state(self): """Return the URL.""" return self._url
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/weblink.py
""" Interfaces with Verisure sensors. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/binary_sensor.verisure/ """ import logging from homeassistant.components.binary_sensor import BinarySensorDevice from homeassistant.components.verisure import CONF_DOOR_WINDOW from homeassistant.components.verisure import HUB as hub _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Verisure binary sensors.""" sensors = [] hub.update_overview() if int(hub.config.get(CONF_DOOR_WINDOW, 1)): sensors.extend([ VerisureDoorWindowSensor(device_label) for device_label in hub.get( "$.doorWindow.doorWindowDevice[*].deviceLabel")]) add_entities(sensors) class VerisureDoorWindowSensor(BinarySensorDevice): """Representation of a Verisure door window sensor.""" def __init__(self, device_label): """Initialize the Verisure door window sensor.""" self._device_label = device_label @property def name(self): """Return the name of the binary sensor.""" return hub.get_first( "$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].area", self._device_label) @property def is_on(self): """Return the state of the sensor.""" return hub.get_first( "$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].state", self._device_label) == "OPEN" @property def available(self): """Return True if entity is available.""" return hub.get_first( "$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')]", self._device_label) is not None # pylint: disable=no-self-use def update(self): """Update the state of the sensor.""" hub.update_overview()
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/binary_sensor/verisure.py
""" Component to offer a way to select an option from a list. For more details about this component, please refer to the documentation at https://home-assistant.io/components/input_select/ """ import logging import voluptuous as vol from homeassistant.const import ATTR_ENTITY_ID, CONF_ICON, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity _LOGGER = logging.getLogger(__name__) DOMAIN = 'input_select' ENTITY_ID_FORMAT = DOMAIN + '.{}' CONF_INITIAL = 'initial' CONF_OPTIONS = 'options' ATTR_OPTION = 'option' ATTR_OPTIONS = 'options' SERVICE_SELECT_OPTION = 'select_option' SERVICE_SELECT_OPTION_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_OPTION): cv.string, }) SERVICE_SELECT_NEXT = 'select_next' SERVICE_SELECT_NEXT_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, }) SERVICE_SELECT_PREVIOUS = 'select_previous' SERVICE_SELECT_PREVIOUS_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, }) SERVICE_SET_OPTIONS = 'set_options' SERVICE_SET_OPTIONS_SCHEMA = vol.Schema({ vol.Required(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]), }) def _cv_input_select(cfg): """Configure validation helper for input select (voluptuous).""" options = cfg[CONF_OPTIONS] initial = cfg.get(CONF_INITIAL) if initial is not None and initial not in options: raise vol.Invalid('initial state "{}" is not part of the options: {}' .format(initial, ','.join(options))) return cfg CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ cv.slug: vol.All({ vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]), vol.Optional(CONF_INITIAL): cv.string, vol.Optional(CONF_ICON): cv.icon, }, _cv_input_select)}) }, required=True, extra=vol.ALLOW_EXTRA) async def async_setup(hass, config): """Set up an input select.""" component = EntityComponent(_LOGGER, DOMAIN, hass) entities = [] for object_id, cfg in config[DOMAIN].items(): name = cfg.get(CONF_NAME) options = cfg.get(CONF_OPTIONS) initial = cfg.get(CONF_INITIAL) icon = cfg.get(CONF_ICON) entities.append(InputSelect(object_id, name, initial, options, icon)) if not entities: return False component.async_register_entity_service( SERVICE_SELECT_OPTION, SERVICE_SELECT_OPTION_SCHEMA, 'async_select_option' ) component.async_register_entity_service( SERVICE_SELECT_NEXT, SERVICE_SELECT_NEXT_SCHEMA, lambda entity, call: entity.async_offset_index(1) ) component.async_register_entity_service( SERVICE_SELECT_PREVIOUS, SERVICE_SELECT_PREVIOUS_SCHEMA, lambda entity, call: entity.async_offset_index(-1) ) component.async_register_entity_service( SERVICE_SET_OPTIONS, SERVICE_SET_OPTIONS_SCHEMA, 'async_set_options' ) await component.async_add_entities(entities) return True class InputSelect(RestoreEntity): """Representation of a select input.""" def __init__(self, object_id, name, initial, options, icon): """Initialize a select input.""" self.entity_id = ENTITY_ID_FORMAT.format(object_id) self._name = name self._current_option = initial self._options = options self._icon = icon async def async_added_to_hass(self): """Run when entity about to be added.""" await super().async_added_to_hass() if self._current_option is not None: return state = await self.async_get_last_state() if not state or state.state not in self._options: self._current_option = self._options[0] else: 
self._current_option = state.state @property def should_poll(self): """If entity should be polled.""" return False @property def name(self): """Return the name of the select input.""" return self._name @property def icon(self): """Return the icon to be used for this entity.""" return self._icon @property def state(self): """Return the state of the component.""" return self._current_option @property def state_attributes(self): """Return the state attributes.""" return { ATTR_OPTIONS: self._options, } async def async_select_option(self, option): """Select new option.""" if option not in self._options: _LOGGER.warning('Invalid option: %s (possible options: %s)', option, ', '.join(self._options)) return self._current_option = option await self.async_update_ha_state() async def async_offset_index(self, offset): """Offset current index.""" current_index = self._options.index(self._current_option) new_index = (current_index + offset) % len(self._options) self._current_option = self._options[new_index] await self.async_update_ha_state() async def async_set_options(self, options): """Set options.""" self._current_option = options[0] self._options = options await self.async_update_ha_state()
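A short sketch, in the same style as the fan tests shown in this document, of exercising input_select.select_option end to end; the helper name and options are made up for illustration, and the hass fixture and setup import from the test files are assumed.

assert await setup.async_setup_component(hass, 'input_select', {
    'input_select': {
        'mode': {'options': ['eco', 'boost']},   # hypothetical helper
    }
})

# select_option validates the option against the configured list before setting it
await hass.services.async_call(
    'input_select', 'select_option',
    {'entity_id': 'input_select.mode', 'option': 'boost'}, blocking=True)
assert hass.states.get('input_select.mode').state == 'boost'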
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/input_select.py
""" Support for the voicerss speech service. For more details about this component, please refer to the documentation at https://home-assistant.io/components/tts.voicerss/ """ import asyncio import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.const import CONF_API_KEY from homeassistant.components.tts import Provider, PLATFORM_SCHEMA, CONF_LANG from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) VOICERSS_API_URL = "https://api.voicerss.org/" ERROR_MSG = [ b'Error description', b'The subscription is expired or requests count limitation is exceeded!', b'The request content length is too large!', b'The language does not support!', b'The language is not specified!', b'The text is not specified!', b'The API key is not available!', b'The API key is not specified!', b'The subscription does not support SSML!', ] SUPPORT_LANGUAGES = [ 'ca-es', 'zh-cn', 'zh-hk', 'zh-tw', 'da-dk', 'nl-nl', 'en-au', 'en-ca', 'en-gb', 'en-in', 'en-us', 'fi-fi', 'fr-ca', 'fr-fr', 'de-de', 'it-it', 'ja-jp', 'ko-kr', 'nb-no', 'pl-pl', 'pt-br', 'pt-pt', 'ru-ru', 'es-mx', 'es-es', 'sv-se', ] SUPPORT_CODECS = [ 'mp3', 'wav', 'aac', 'ogg', 'caf' ] SUPPORT_FORMATS = [ '8khz_8bit_mono', '8khz_8bit_stereo', '8khz_16bit_mono', '8khz_16bit_stereo', '11khz_8bit_mono', '11khz_8bit_stereo', '11khz_16bit_mono', '11khz_16bit_stereo', '12khz_8bit_mono', '12khz_8bit_stereo', '12khz_16bit_mono', '12khz_16bit_stereo', '16khz_8bit_mono', '16khz_8bit_stereo', '16khz_16bit_mono', '16khz_16bit_stereo', '22khz_8bit_mono', '22khz_8bit_stereo', '22khz_16bit_mono', '22khz_16bit_stereo', '24khz_8bit_mono', '24khz_8bit_stereo', '24khz_16bit_mono', '24khz_16bit_stereo', '32khz_8bit_mono', '32khz_8bit_stereo', '32khz_16bit_mono', '32khz_16bit_stereo', '44khz_8bit_mono', '44khz_8bit_stereo', '44khz_16bit_mono', '44khz_16bit_stereo', '48khz_8bit_mono', '48khz_8bit_stereo', '48khz_16bit_mono', '48khz_16bit_stereo', 'alaw_8khz_mono', 'alaw_8khz_stereo', 'alaw_11khz_mono', 'alaw_11khz_stereo', 'alaw_22khz_mono', 'alaw_22khz_stereo', 'alaw_44khz_mono', 'alaw_44khz_stereo', 'ulaw_8khz_mono', 'ulaw_8khz_stereo', 'ulaw_11khz_mono', 'ulaw_11khz_stereo', 'ulaw_22khz_mono', 'ulaw_22khz_stereo', 'ulaw_44khz_mono', 'ulaw_44khz_stereo', ] CONF_CODEC = 'codec' CONF_FORMAT = 'format' DEFAULT_LANG = 'en-us' DEFAULT_CODEC = 'mp3' DEFAULT_FORMAT = '8khz_8bit_mono' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES), vol.Optional(CONF_CODEC, default=DEFAULT_CODEC): vol.In(SUPPORT_CODECS), vol.Optional(CONF_FORMAT, default=DEFAULT_FORMAT): vol.In(SUPPORT_FORMATS), }) async def async_get_engine(hass, config): """Set up VoiceRSS TTS component.""" return VoiceRSSProvider(hass, config) class VoiceRSSProvider(Provider): """The VoiceRSS speech API provider.""" def __init__(self, hass, conf): """Init VoiceRSS TTS service.""" self.hass = hass self._extension = conf[CONF_CODEC] self._lang = conf[CONF_LANG] self.name = 'VoiceRSS' self._form_data = { 'key': conf[CONF_API_KEY], 'hl': conf[CONF_LANG], 'c': (conf[CONF_CODEC]).upper(), 'f': conf[CONF_FORMAT], } @property def default_language(self): """Return the default language.""" return self._lang @property def supported_languages(self): """Return list of supported languages.""" return SUPPORT_LANGUAGES async def async_get_tts_audio(self, message, language, options=None): """Load 
TTS from VoiceRSS.""" websession = async_get_clientsession(self.hass) form_data = self._form_data.copy() form_data['src'] = message form_data['hl'] = language try: with async_timeout.timeout(10, loop=self.hass.loop): request = await websession.post( VOICERSS_API_URL, data=form_data ) if request.status != 200: _LOGGER.error("Error %d when loading URL %s", request.status, request.url) return (None, None) data = await request.read() if data in ERROR_MSG: _LOGGER.error( "Received error %s from VoiceRSS", str(data, 'utf-8')) return (None, None) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Timeout for VoiceRSS API") return (None, None) return (self._extension, data)
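A hedged sketch of driving the provider above directly; the API key is a placeholder, and async_get_tts_audio performs a real HTTP request, so a valid key and network access would be needed for it to return audio rather than (None, None).

# Build the provider the same way async_get_engine would, using the module's constants.
provider = VoiceRSSProvider(hass, {
    CONF_API_KEY: 'YOUR_VOICERSS_KEY',   # placeholder key
    CONF_LANG: 'en-us',
    CONF_CODEC: 'mp3',
    CONF_FORMAT: '8khz_8bit_mono',
})

extension, audio = await provider.async_get_tts_audio('Hello world', 'en-us')
# extension is 'mp3'; audio is the raw payload, or (None, None) on any error.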
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
        SPEED_MEDIUM, None, None)


async def test_set_invalid_speed_from_initial_stage(hass, calls):
    """Test set invalid speed when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's speed to 'invalid'
    common.async_set_speed(hass, _TEST_FAN, 'invalid')
    await hass.async_block_till_done()

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == ''
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_speed(hass, calls):
    """Test set invalid speed when fan has valid speed."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's speed to high
    common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)

    # Set fan's speed to 'invalid'
    common.async_set_speed(hass, _TEST_FAN, 'invalid')
    await hass.async_block_till_done()

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)


async def test_custom_speed_list(hass, calls):
    """Test set custom speed list."""
    await _register_components(hass, ['1', '2', '3'])

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's speed to '1'
    common.async_set_speed(hass, _TEST_FAN, '1')
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == '1'
    _verify(hass, STATE_ON, '1', None, None)

    # Set fan's speed to 'medium' which is invalid
    common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM)
    await hass.async_block_till_done()

    # verify that speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == '1'
    _verify(hass, STATE_ON, '1', None, None)


async def test_set_osc(hass, calls):
    """Test set oscillating."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's osc to True
    common.async_oscillate(hass, _TEST_FAN, True)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_OSC_INPUT).state == 'True'
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to False
    common.async_oscillate(hass, _TEST_FAN, False)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_OSC_INPUT).state == 'False'
    _verify(hass, STATE_ON, None, False, None)


async def test_set_invalid_osc_from_initial_state(hass, calls):
    """Test set invalid oscillating when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's osc to 'invalid'
    common.async_oscillate(hass, _TEST_FAN, 'invalid')
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_OSC_INPUT).state == ''
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_osc(hass, calls):
    """Test set invalid oscillating when fan has valid osc."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's osc to True
    common.async_oscillate(hass, _TEST_FAN, True)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_OSC_INPUT).state == 'True'
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to False
    common.async_oscillate(hass, _TEST_FAN, None)
    await hass.async_block_till_done()

    # verify osc is unchanged
    assert hass.states.get(_OSC_INPUT).state == 'True'
    _verify(hass, STATE_ON, None, True, None)


async def test_set_direction(hass, calls):
    """Test set valid direction."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's direction to forward
    common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state \
        == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to reverse
    common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state \
        == DIRECTION_REVERSE
    _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE)


async def test_set_invalid_direction_from_initial_stage(hass, calls):
    """Test set invalid direction when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's direction to 'invalid'
    common.async_set_direction(hass, _TEST_FAN, 'invalid')
    await hass.async_block_till_done()

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == ''
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_direction(hass, calls):
    """Test set invalid direction when fan has valid direction."""
    await _register_components(hass)

    # Turn on fan
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # Set fan's direction to forward
    common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)
    await hass.async_block_till_done()

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \
        DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to 'invalid'
    common.async_set_direction(hass, _TEST_FAN, 'invalid')
    await hass.async_block_till_done()

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \
        DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)


def _verify(hass, expected_state, expected_speed,
            expected_oscillating, expected_direction):
    """Verify fan's state, speed and osc."""
    state = hass.states.get(_TEST_FAN)
    attributes = state.attributes
    assert state.state == expected_state
    assert attributes.get(ATTR_SPEED, None) == expected_speed
    assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating
    assert attributes.get(ATTR_DIRECTION, None) == expected_direction


async def _register_components(hass, speed_list=None):
    """Register basic components for testing."""
    with assert_setup_component(1, 'input_boolean'):
        assert await setup.async_setup_component(
            hass, 'input_boolean', {'input_boolean': {'state': None}}
        )

    with assert_setup_component(3, 'input_select'):
        assert await setup.async_setup_component(hass, 'input_select', {
            'input_select': {
                'speed': {
                    'name': 'Speed',
                    'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH,
                                '1', '2', '3']
                },
                'osc': {
                    'name': 'oscillating',
                    'options': ['', 'True', 'False']
                },
                'direction': {
                    'name': 'Direction',
                    'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE]
                },
            }
        })

    with assert_setup_component(1, 'fan'):
        value_template = """
        {% if is_state('input_boolean.state', 'on') %}
            {{ 'on' }}
        {% else %}
            {{ 'off' }}
        {% endif %}
        """

        test_fan_config = {
            'value_template': value_template,
            'speed_template': "{{ states('input_select.speed') }}",
            'oscillating_template': "{{ states('input_select.osc') }}",
            'direction_template': "{{ states('input_select.direction') }}",
            'turn_on': {
                'service': 'input_boolean.turn_on',
                'entity_id': _STATE_INPUT_BOOLEAN
            },
            'turn_off': {
                'service': 'input_boolean.turn_off',
                'entity_id': _STATE_INPUT_BOOLEAN
            },
            'set_speed': {
                'service': 'input_select.select_option',
                'data_template': {
                    'entity_id': _SPEED_INPUT_SELECT,
                    'option': '{{ speed }}'
                }
            },
            'set_oscillating': {
                'service': 'input_select.select_option',
                'data_template': {
                    'entity_id': _OSC_INPUT,
                    'option': '{{ oscillating }}'
                }
            },
            'set_direction': {
                'service': 'input_select.select_option',
                'data_template': {
                    'entity_id': _DIRECTION_INPUT_SELECT,
                    'option': '{{ direction }}'
                }
            }
        }

        if speed_list:
            test_fan_config['speeds'] = speed_list

        assert await setup.async_setup_component(hass, 'fan', {
            'fan': {
                'platform': 'template',
                'fans': {
                    'test_fan': test_fan_config
                }
            }
        })

    await hass.async_start()
    await hass.async_block_till_done()
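
# Every function test above starts with the same two steps: turn the test fan
# on and wait for the event loop to settle.  A small helper like the sketch
# below could factor that pattern out; ``_async_turn_on_and_wait`` is a
# hypothetical name and is not part of the original test module.
async def _async_turn_on_and_wait(hass):
    """Turn on the test fan and wait until pending work has completed."""
    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()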
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/tts/voicerss.py
""" Support for setting the Transmission BitTorrent client Turtle Mode. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.transmission/ """ import logging import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PORT, CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON) from homeassistant.helpers.entity import ToggleEntity import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['transmissionrpc==0.11'] _LOGGING = logging.getLogger(__name__) DEFAULT_NAME = 'Transmission Turtle Mode' DEFAULT_PORT = 9091 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_USERNAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Transmission switch.""" import transmissionrpc from transmissionrpc.error import TransmissionError name = config.get(CONF_NAME) host = config.get(CONF_HOST) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) port = config.get(CONF_PORT) try: transmission_api = transmissionrpc.Client( host, port=port, user=username, password=password) transmission_api.session_stats() except TransmissionError as error: _LOGGING.error( "Connection to Transmission API failed on %s:%s with message %s", host, port, error.original ) return False add_entities([TransmissionSwitch(transmission_api, name)]) class TransmissionSwitch(ToggleEntity): """Representation of a Transmission switch.""" def __init__(self, transmission_client, name): """Initialize the Transmission switch.""" self._name = name self.transmission_client = transmission_client self._state = STATE_OFF @property def name(self): """Return the name of the switch.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def should_poll(self): """Poll for status regularly.""" return True @property def is_on(self): """Return true if device is on.""" return self._state == STATE_ON def turn_on(self, **kwargs): """Turn the device on.""" _LOGGING.debug("Turning Turtle Mode of Transmission on") self.transmission_client.set_session(alt_speed_enabled=True) def turn_off(self, **kwargs): """Turn the device off.""" _LOGGING.debug("Turning Turtle Mode of Transmission off") self.transmission_client.set_session(alt_speed_enabled=False) def update(self): """Get the latest data from Transmission and updates the state.""" active = self.transmission_client.get_session().alt_speed_enabled self._state = STATE_ON if active else STATE_OFF
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/switch/transmission.py
"""Zwave discovery schemas.""" from . import const DEFAULT_VALUES_SCHEMA = { 'power': { const.DISC_SCHEMAS: [ {const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SENSOR_MULTILEVEL_POWER]}, {const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_METER], const.DISC_INDEX: [const.INDEX_METER_POWER]}, ], const.DISC_OPTIONAL: True, }, } DISCOVERY_SCHEMAS = [ {const.DISC_COMPONENT: 'binary_sensor', const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_ENTRY_CONTROL, const.GENERIC_TYPE_SENSOR_ALARM, const.GENERIC_TYPE_SENSOR_BINARY, const.GENERIC_TYPE_SWITCH_BINARY, const.GENERIC_TYPE_METER, const.GENERIC_TYPE_SENSOR_MULTILEVEL, const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_SENSOR_NOTIFICATION, const.GENERIC_TYPE_THERMOSTAT], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_BINARY], const.DISC_TYPE: const.TYPE_BOOL, const.DISC_GENRE: const.GENRE_USER, }, 'off_delay': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION], const.DISC_INDEX: [9], const.DISC_OPTIONAL: True, }})}, {const.DISC_COMPONENT: 'climate', const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_THERMOSTAT, const.GENERIC_TYPE_SENSOR_MULTILEVEL], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [ const.COMMAND_CLASS_THERMOSTAT_SETPOINT], }, 'temperature': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SENSOR_MULTILEVEL_TEMPERATURE], const.DISC_OPTIONAL: True, }, 'mode': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_THERMOSTAT_MODE], const.DISC_OPTIONAL: True, }, 'fan_mode': { const.DISC_COMMAND_CLASS: [ const.COMMAND_CLASS_THERMOSTAT_FAN_MODE], const.DISC_OPTIONAL: True, }, 'operating_state': { const.DISC_COMMAND_CLASS: [ const.COMMAND_CLASS_THERMOSTAT_OPERATING_STATE], const.DISC_OPTIONAL: True, }, 'fan_state': { const.DISC_COMMAND_CLASS: [ const.COMMAND_CLASS_THERMOSTAT_FAN_STATE], const.DISC_OPTIONAL: True, }, 'zxt_120_swing_mode': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION], const.DISC_INDEX: [33], const.DISC_OPTIONAL: True, }})}, {const.DISC_COMPONENT: 'cover', # Rollershutter const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_ENTRY_CONTROL], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL, const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION, const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON, const.SPECIFIC_TYPE_SECURE_DOOR], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_GENRE: const.GENRE_USER, }, 'open': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_BRIGHT], const.DISC_OPTIONAL: True, }, 'close': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_DIM], const.DISC_OPTIONAL: True, }})}, {const.DISC_COMPONENT: 'cover', # Garage Door Switch const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_ENTRY_CONTROL], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL, const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION, const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON, 
const.SPECIFIC_TYPE_SECURE_DOOR], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_BINARY], const.DISC_GENRE: const.GENRE_USER, }})}, {const.DISC_COMPONENT: 'cover', # Garage Door Barrier const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_ENTRY_CONTROL], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL, const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL, const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION, const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON, const.SPECIFIC_TYPE_SECURE_DOOR], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_BARRIER_OPERATOR], const.DISC_INDEX: [const.INDEX_BARRIER_OPERATOR_LABEL], }})}, {const.DISC_COMPONENT: 'fan', const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_SWITCH_MULTILEVEL], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_FAN_SWITCH], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_LEVEL], const.DISC_TYPE: const.TYPE_BYTE, }})}, {const.DISC_COMPONENT: 'light', const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_SWITCH_REMOTE], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_POWER_SWITCH_MULTILEVEL, const.SPECIFIC_TYPE_SCENE_SWITCH_MULTILEVEL, const.SPECIFIC_TYPE_NOT_USED], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_LEVEL], const.DISC_TYPE: const.TYPE_BYTE, }, 'dimming_duration': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL], const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_DURATION], const.DISC_OPTIONAL: True, }, 'color': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_COLOR], const.DISC_INDEX: [const.INDEX_SWITCH_COLOR_COLOR], const.DISC_OPTIONAL: True, }, 'color_channels': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_COLOR], const.DISC_INDEX: [const.INDEX_SWITCH_COLOR_CHANNELS], const.DISC_OPTIONAL: True, }})}, {const.DISC_COMPONENT: 'lock', const.DISC_GENERIC_DEVICE_CLASS: [const.GENERIC_TYPE_ENTRY_CONTROL], const.DISC_SPECIFIC_DEVICE_CLASS: [ const.SPECIFIC_TYPE_DOOR_LOCK, const.SPECIFIC_TYPE_ADVANCED_DOOR_LOCK, const.SPECIFIC_TYPE_SECURE_KEYPAD_DOOR_LOCK, const.SPECIFIC_TYPE_SECURE_LOCKBOX], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_DOOR_LOCK], const.DISC_INDEX: [const.INDEX_DOOR_LOCK_LOCK], }, 'access_control': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM], const.DISC_INDEX: [const.INDEX_ALARM_ACCESS_CONTROL], const.DISC_OPTIONAL: True, }, 'alarm_type': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM], const.DISC_INDEX: [const.INDEX_ALARM_TYPE], const.DISC_OPTIONAL: True, }, 'alarm_level': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM], const.DISC_INDEX: [const.INDEX_ALARM_LEVEL], const.DISC_OPTIONAL: True, }, 'v2btze_advanced': { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION], const.DISC_INDEX: [12], const.DISC_OPTIONAL: True, }})}, {const.DISC_COMPONENT: 'sensor', const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [ const.COMMAND_CLASS_SENSOR_MULTILEVEL, 
const.COMMAND_CLASS_METER, const.COMMAND_CLASS_ALARM, const.COMMAND_CLASS_SENSOR_ALARM, const.COMMAND_CLASS_INDICATOR], const.DISC_GENRE: const.GENRE_USER, }})}, {const.DISC_COMPONENT: 'switch', const.DISC_GENERIC_DEVICE_CLASS: [ const.GENERIC_TYPE_METER, const.GENERIC_TYPE_SENSOR_ALARM, const.GENERIC_TYPE_SENSOR_BINARY, const.GENERIC_TYPE_SWITCH_BINARY, const.GENERIC_TYPE_ENTRY_CONTROL, const.GENERIC_TYPE_SENSOR_MULTILEVEL, const.GENERIC_TYPE_SWITCH_MULTILEVEL, const.GENERIC_TYPE_SENSOR_NOTIFICATION, const.GENERIC_TYPE_GENERIC_CONTROLLER, const.GENERIC_TYPE_SWITCH_REMOTE, const.GENERIC_TYPE_REPEATER_SLAVE, const.GENERIC_TYPE_THERMOSTAT, const.GENERIC_TYPE_WALL_CONTROLLER], const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{ const.DISC_PRIMARY: { const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_BINARY], const.DISC_TYPE: const.TYPE_BOOL, const.DISC_GENRE: const.GENRE_USER, }})}, ]
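
# Illustrative helper, not part of the original module: given a component
# name, collect the generic device classes its discovery schemas accept.
# It only reads the DISCOVERY_SCHEMAS structure defined above.
def generic_classes_for_component(component):
    """Return the generic device classes listed for *component*."""
    classes = set()
    for schema in DISCOVERY_SCHEMAS:
        if schema[const.DISC_COMPONENT] != component:
            continue
        classes.update(schema.get(const.DISC_GENERIC_DEVICE_CLASS, []))
    return classes

# For example, generic_classes_for_component('fan') would yield only
# GENERIC_TYPE_SWITCH_MULTILEVEL, matching the single 'fan' entry above.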
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/zwave/discovery_schemas.py
""" Exposes regular REST commands as services. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/hassio/ """ from datetime import timedelta import logging import os import voluptuous as vol from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import SERVICE_CHECK_CONFIG from homeassistant.const import ( ATTR_NAME, SERVICE_HOMEASSISTANT_RESTART, SERVICE_HOMEASSISTANT_STOP) from homeassistant.core import DOMAIN as HASS_DOMAIN from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.loader import bind_hass from homeassistant.util.dt import utcnow from homeassistant.exceptions import HomeAssistantError from .auth import async_setup_auth from .handler import HassIO, HassioAPIError from .discovery import async_setup_discovery from .http import HassIOView _LOGGER = logging.getLogger(__name__) DOMAIN = 'hassio' DEPENDENCIES = ['http'] STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 CONF_FRONTEND_REPO = 'development_repo' CONFIG_SCHEMA = vol.Schema({ vol.Optional(DOMAIN): vol.Schema({ vol.Optional(CONF_FRONTEND_REPO): cv.isdir, }), }, extra=vol.ALLOW_EXTRA) DATA_HOMEASSISTANT_VERSION = 'hassio_hass_version' HASSIO_UPDATE_INTERVAL = timedelta(minutes=55) SERVICE_ADDON_START = 'addon_start' SERVICE_ADDON_STOP = 'addon_stop' SERVICE_ADDON_RESTART = 'addon_restart' SERVICE_ADDON_STDIN = 'addon_stdin' SERVICE_HOST_SHUTDOWN = 'host_shutdown' SERVICE_HOST_REBOOT = 'host_reboot' SERVICE_SNAPSHOT_FULL = 'snapshot_full' SERVICE_SNAPSHOT_PARTIAL = 'snapshot_partial' SERVICE_RESTORE_FULL = 'restore_full' SERVICE_RESTORE_PARTIAL = 'restore_partial' ATTR_ADDON = 'addon' ATTR_INPUT = 'input' ATTR_SNAPSHOT = 'snapshot' ATTR_ADDONS = 'addons' ATTR_FOLDERS = 'folders' ATTR_HOMEASSISTANT = 'homeassistant' ATTR_PASSWORD = 'password' SCHEMA_NO_DATA = vol.Schema({}) SCHEMA_ADDON = vol.Schema({ vol.Required(ATTR_ADDON): cv.slug, }) SCHEMA_ADDON_STDIN = SCHEMA_ADDON.extend({ vol.Required(ATTR_INPUT): vol.Any(dict, cv.string) }) SCHEMA_SNAPSHOT_FULL = vol.Schema({ vol.Optional(ATTR_NAME): cv.string, vol.Optional(ATTR_PASSWORD): cv.string, }) SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({ vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.string]), }) SCHEMA_RESTORE_FULL = vol.Schema({ vol.Required(ATTR_SNAPSHOT): cv.slug, vol.Optional(ATTR_PASSWORD): cv.string, }) SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend({ vol.Optional(ATTR_HOMEASSISTANT): cv.boolean, vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.string]), }) MAP_SERVICE_API = { SERVICE_ADDON_START: ('/addons/{addon}/start', SCHEMA_ADDON, 60, False), SERVICE_ADDON_STOP: ('/addons/{addon}/stop', SCHEMA_ADDON, 60, False), SERVICE_ADDON_RESTART: ('/addons/{addon}/restart', SCHEMA_ADDON, 60, False), SERVICE_ADDON_STDIN: ('/addons/{addon}/stdin', SCHEMA_ADDON_STDIN, 60, False), SERVICE_HOST_SHUTDOWN: ('/host/shutdown', SCHEMA_NO_DATA, 60, False), SERVICE_HOST_REBOOT: ('/host/reboot', SCHEMA_NO_DATA, 60, False), SERVICE_SNAPSHOT_FULL: ('/snapshots/new/full', SCHEMA_SNAPSHOT_FULL, 300, True), SERVICE_SNAPSHOT_PARTIAL: ('/snapshots/new/partial', SCHEMA_SNAPSHOT_PARTIAL, 300, True), SERVICE_RESTORE_FULL: ('/snapshots/{snapshot}/restore/full', SCHEMA_RESTORE_FULL, 300, True), SERVICE_RESTORE_PARTIAL: ('/snapshots/{snapshot}/restore/partial', SCHEMA_RESTORE_PARTIAL, 300, True), } 
@callback @bind_hass def get_homeassistant_version(hass): """Return latest available Home Assistant version. Async friendly. """ return hass.data.get(DATA_HOMEASSISTANT_VERSION) @callback @bind_hass def is_hassio(hass): """Return true if hass.io is loaded. Async friendly. """ return DOMAIN in hass.config.components @bind_hass async def async_check_config(hass): """Check configuration over Hass.io API.""" hassio = hass.data[DOMAIN] try: result = await hassio.check_homeassistant_config() except HassioAPIError as err: _LOGGER.error("Error on Hass.io API: %s", err) raise HomeAssistantError() from None else: if result['result'] == "error": return result['message'] return None async def async_setup(hass, config): """Set up the Hass.io component.""" # Check local setup for env in ('HASSIO', 'HASSIO_TOKEN'): if os.environ.get(env): continue _LOGGER.error("Missing %s environment variable.", env) return False host = os.environ['HASSIO'] websession = hass.helpers.aiohttp_client.async_get_clientsession() hass.data[DOMAIN] = hassio = HassIO(hass.loop, websession, host) if not await hassio.is_connected(): _LOGGER.error("Not connected with Hass.io") return False store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) data = await store.async_load() if data is None: data = {} refresh_token = None if 'hassio_user' in data: user = await hass.auth.async_get_user(data['hassio_user']) if user and user.refresh_tokens: refresh_token = list(user.refresh_tokens.values())[0] # Migrate old hass.io users to be admin. if not user.is_admin: await hass.auth.async_update_user( user, group_ids=[GROUP_ID_ADMIN]) if refresh_token is None: user = await hass.auth.async_create_system_user( 'Hass.io', [GROUP_ID_ADMIN]) refresh_token = await hass.auth.async_create_refresh_token(user) data['hassio_user'] = user.id await store.async_save(data) # This overrides the normal API call that would be forwarded development_repo = config.get(DOMAIN, {}).get(CONF_FRONTEND_REPO) if development_repo is not None: hass.http.register_static_path( '/api/hassio/app', os.path.join(development_repo, 'hassio/build'), False) hass.http.register_view(HassIOView(host, websession)) if 'frontend' in hass.config.components: await hass.components.panel_custom.async_register_panel( frontend_url_path='hassio', webcomponent_name='hassio-main', sidebar_title='Hass.io', sidebar_icon='hass:home-assistant', js_url='/api/hassio/app/entrypoint.js', embed_iframe=True, ) await hassio.update_hass_api(config.get('http', {}), refresh_token.token) if 'homeassistant' in config: await hassio.update_hass_timezone(config['homeassistant']) async def async_service_handler(service): """Handle service calls for Hass.io.""" api_command = MAP_SERVICE_API[service.service][0] data = service.data.copy() addon = data.pop(ATTR_ADDON, None) snapshot = data.pop(ATTR_SNAPSHOT, None) payload = None # Pass data to hass.io API if service.service == SERVICE_ADDON_STDIN: payload = data[ATTR_INPUT] elif MAP_SERVICE_API[service.service][3]: payload = data # Call API try: await hassio.send_command( api_command.format(addon=addon, snapshot=snapshot), payload=payload, timeout=MAP_SERVICE_API[service.service][2] ) except HassioAPIError as err: _LOGGER.error("Error on Hass.io API: %s", err) for service, settings in MAP_SERVICE_API.items(): hass.services.async_register( DOMAIN, service, async_service_handler, schema=settings[1]) async def update_homeassistant_version(now): """Update last available Home Assistant version.""" try: data = await hassio.get_homeassistant_info() 
hass.data[DATA_HOMEASSISTANT_VERSION] = data['last_version'] except HassioAPIError as err: _LOGGER.warning("Can't read last version: %s", err) hass.helpers.event.async_track_point_in_utc_time( update_homeassistant_version, utcnow() + HASSIO_UPDATE_INTERVAL) # Fetch last version await update_homeassistant_version(None) async def async_handle_core_service(call): """Service handler for handling core services.""" if call.service == SERVICE_HOMEASSISTANT_STOP: await hassio.stop_homeassistant() return error = await async_check_config(hass) if error: _LOGGER.error(error) hass.components.persistent_notification.async_create( "Config error. See dev-info panel for details.", "Config validating", "{0}.check_config".format(HASS_DOMAIN)) return if call.service == SERVICE_HOMEASSISTANT_RESTART: await hassio.restart_homeassistant() # Mock core services for service in (SERVICE_HOMEASSISTANT_STOP, SERVICE_HOMEASSISTANT_RESTART, SERVICE_CHECK_CONFIG): hass.services.async_register( HASS_DOMAIN, service, async_handle_core_service) # Init discovery Hass.io feature async_setup_discovery(hass, hassio, config) # Init auth Hass.io feature async_setup_auth(hass) return True
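
# Illustrative only, not part of the original module: once async_setup has
# registered the services above, a full snapshot could be requested from
# inside Home Assistant roughly like this (the snapshot name is a
# placeholder):
#
#     await hass.services.async_call(
#         DOMAIN, SERVICE_SNAPSHOT_FULL, {ATTR_NAME: 'nightly-backup'})
#
# Per MAP_SERVICE_API, that call maps to the /snapshots/new/full endpoint
# with a 300 second timeout, and the validated service data is passed through
# as the request payload.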
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/hassio/__init__.py
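The configuration tests above never exercise the `calls` fixture directly; the short sketch below shows the same `async_mock_service` pattern used to capture the fan's scripted `turn_on` action. It is a hypothetical test, not part of the original module: the test name is made up, it reuses `setup`, `common`, `_TEST_FAN` and `async_mock_service` from the module above, and it assumes the template fan always runs its `turn_on` action when the service is called.

# Hypothetical sketch (not part of the original test module above): capture
# the scripted turn_on action with async_mock_service and assert it ran once.
async def test_turn_on_calls_script(hass):
    """Sketch: turning the fan on should invoke the configured script."""
    fan_on_calls = async_mock_service(hass, 'script', 'fan_on')

    assert await setup.async_setup_component(hass, 'fan', {
        'fan': {
            'platform': 'template',
            'fans': {
                'test_fan': {
                    'value_template': "{{ 'on' }}",
                    'turn_on': {'service': 'script.fan_on'},
                    'turn_off': {'service': 'script.fan_off'},
                }
            }
        }
    })
    await hass.async_start()
    await hass.async_block_till_done()

    common.async_turn_on(hass, _TEST_FAN)
    await hass.async_block_till_done()

    # The mocked script service should have been captured exactly once.
    assert len(fan_on_calls) == 1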
""" Support for MQTT locks. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/lock.mqtt/ """ import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.components.lock import LockDevice from homeassistant.components.mqtt import ( ATTR_DISCOVERY_HASH, CONF_AVAILABILITY_TOPIC, CONF_COMMAND_TOPIC, CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo) from homeassistant.const import ( CONF_DEVICE, CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE) from homeassistant.components import mqtt, lock from homeassistant.components.mqtt.discovery import MQTT_DISCOVERY_NEW import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import HomeAssistantType, ConfigType _LOGGER = logging.getLogger(__name__) CONF_PAYLOAD_LOCK = 'payload_lock' CONF_PAYLOAD_UNLOCK = 'payload_unlock' CONF_UNIQUE_ID = 'unique_id' DEFAULT_NAME = 'MQTT Lock' DEFAULT_OPTIMISTIC = False DEFAULT_PAYLOAD_LOCK = 'LOCK' DEFAULT_PAYLOAD_UNLOCK = 'UNLOCK' DEPENDENCIES = ['mqtt'] PLATFORM_SCHEMA = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({ vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PAYLOAD_LOCK, default=DEFAULT_PAYLOAD_LOCK): cv.string, vol.Optional(CONF_PAYLOAD_UNLOCK, default=DEFAULT_PAYLOAD_UNLOCK): cv.string, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA, }).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema) async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None): """Set up MQTT lock panel through configuration.yaml.""" await _async_setup_entity(hass, config, async_add_entities) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up MQTT lock dynamically through MQTT discovery.""" async def async_discover(discovery_payload): """Discover and add an MQTT lock.""" config = PLATFORM_SCHEMA(discovery_payload) await _async_setup_entity(hass, config, async_add_entities, discovery_payload[ATTR_DISCOVERY_HASH]) async_dispatcher_connect( hass, MQTT_DISCOVERY_NEW.format(lock.DOMAIN, 'mqtt'), async_discover) async def _async_setup_entity(hass, config, async_add_entities, discovery_hash=None): """Set up the MQTT Lock platform.""" value_template = config.get(CONF_VALUE_TEMPLATE) if value_template is not None: value_template.hass = hass async_add_entities([MqttLock( config.get(CONF_NAME), config.get(CONF_STATE_TOPIC), config.get(CONF_COMMAND_TOPIC), config.get(CONF_QOS), config.get(CONF_RETAIN), config.get(CONF_PAYLOAD_LOCK), config.get(CONF_PAYLOAD_UNLOCK), config.get(CONF_OPTIMISTIC), value_template, config.get(CONF_AVAILABILITY_TOPIC), config.get(CONF_PAYLOAD_AVAILABLE), config.get(CONF_PAYLOAD_NOT_AVAILABLE), config.get(CONF_UNIQUE_ID), config.get(CONF_DEVICE), discovery_hash, )]) class MqttLock(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo, LockDevice): """Representation of a lock that can be toggled using MQTT.""" def __init__(self, name, state_topic, command_topic, qos, retain, payload_lock, payload_unlock, optimistic, value_template, availability_topic, payload_available, payload_not_available, unique_id, device_config, discovery_hash): """Initialize the lock.""" MqttAvailability.__init__(self, 
availability_topic, qos, payload_available, payload_not_available) MqttDiscoveryUpdate.__init__(self, discovery_hash) MqttEntityDeviceInfo.__init__(self, device_config) self._state = False self._name = name self._state_topic = state_topic self._command_topic = command_topic self._qos = qos self._retain = retain self._payload_lock = payload_lock self._payload_unlock = payload_unlock self._optimistic = optimistic self._template = value_template self._discovery_hash = discovery_hash self._unique_id = unique_id async def async_added_to_hass(self): """Subscribe to MQTT events.""" await super().async_added_to_hass() @callback def message_received(topic, payload, qos): """Handle new MQTT messages.""" if self._template is not None: payload = self._template.async_render_with_possible_json_value( payload) if payload == self._payload_lock: self._state = True elif payload == self._payload_unlock: self._state = False self.async_schedule_update_ha_state() if self._state_topic is None: # Force into optimistic mode. self._optimistic = True else: await mqtt.async_subscribe( self.hass, self._state_topic, message_received, self._qos) @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the lock.""" return self._name @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def is_locked(self): """Return true if lock is locked.""" return self._state @property def assumed_state(self): """Return true if we do optimistic updates.""" return self._optimistic async def async_lock(self, **kwargs): """Lock the device. This method is a coroutine. """ mqtt.async_publish( self.hass, self._command_topic, self._payload_lock, self._qos, self._retain) if self._optimistic: # Optimistically assume that switch has changed state. self._state = True self.async_schedule_update_ha_state() async def async_unlock(self, **kwargs): """Unlock the device. This method is a coroutine. """ mqtt.async_publish( self.hass, self._command_topic, self._payload_unlock, self._qos, self._retain) if self._optimistic: # Optimistically assume that switch has changed state. self._state = False self.async_schedule_update_ha_state()
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/lock/mqtt.py
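A minimal, hypothetical usage sketch for the MQTT lock platform above: it feeds a payload into the state topic and checks the resulting entity state. The topic names, entity name and test function are made up for illustration; `async_mock_mqtt_component` and `async_fire_mqtt_message` come from the Home Assistant test suite.

# Hypothetical sketch (not part of the platform module above).
from homeassistant.components import lock
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
from homeassistant.setup import async_setup_component

from tests.common import async_fire_mqtt_message, async_mock_mqtt_component


async def test_lock_state_via_topic(hass):
    """Sketch: the lock should follow payloads received on its state topic."""
    await async_mock_mqtt_component(hass)

    assert await async_setup_component(hass, lock.DOMAIN, {
        lock.DOMAIN: {
            'platform': 'mqtt',
            'name': 'test',                  # made-up name -> lock.test
            'state_topic': 'state-topic',    # made-up topics
            'command_topic': 'command-topic',
            'payload_lock': 'LOCK',
            'payload_unlock': 'UNLOCK',
        }
    })

    # No state message has arrived yet, so the lock starts unlocked.
    assert hass.states.get('lock.test').state == STATE_UNLOCKED

    async_fire_mqtt_message(hass, 'state-topic', 'LOCK')
    await hass.async_block_till_done()

    assert hass.states.get('lock.test').state == STATE_LOCKED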
"""Class to hold all media player accessories.""" import logging from pyhap.const import CATEGORY_SWITCH from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_STOP, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_VOLUME_MUTE, STATE_OFF, STATE_PLAYING, STATE_UNKNOWN) from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_MUTED, DOMAIN) from . import TYPES from .accessories import HomeAccessory from .const import ( CHAR_NAME, CHAR_ON, CONF_FEATURE_LIST, FEATURE_ON_OFF, FEATURE_PLAY_PAUSE, FEATURE_PLAY_STOP, FEATURE_TOGGLE_MUTE, SERV_SWITCH) _LOGGER = logging.getLogger(__name__) MODE_FRIENDLY_NAME = {FEATURE_ON_OFF: 'Power', FEATURE_PLAY_PAUSE: 'Play/Pause', FEATURE_PLAY_STOP: 'Play/Stop', FEATURE_TOGGLE_MUTE: 'Mute'} @TYPES.register('MediaPlayer') class MediaPlayer(HomeAccessory): """Generate a Media Player accessory.""" def __init__(self, *args): """Initialize a Switch accessory object.""" super().__init__(*args, category=CATEGORY_SWITCH) self._flag = {FEATURE_ON_OFF: False, FEATURE_PLAY_PAUSE: False, FEATURE_PLAY_STOP: False, FEATURE_TOGGLE_MUTE: False} self.chars = {FEATURE_ON_OFF: None, FEATURE_PLAY_PAUSE: None, FEATURE_PLAY_STOP: None, FEATURE_TOGGLE_MUTE: None} feature_list = self.config[CONF_FEATURE_LIST] if FEATURE_ON_OFF in feature_list: name = self.generate_service_name(FEATURE_ON_OFF) serv_on_off = self.add_preload_service(SERV_SWITCH, CHAR_NAME) serv_on_off.configure_char(CHAR_NAME, value=name) self.chars[FEATURE_ON_OFF] = serv_on_off.configure_char( CHAR_ON, value=False, setter_callback=self.set_on_off) if FEATURE_PLAY_PAUSE in feature_list: name = self.generate_service_name(FEATURE_PLAY_PAUSE) serv_play_pause = self.add_preload_service(SERV_SWITCH, CHAR_NAME) serv_play_pause.configure_char(CHAR_NAME, value=name) self.chars[FEATURE_PLAY_PAUSE] = serv_play_pause.configure_char( CHAR_ON, value=False, setter_callback=self.set_play_pause) if FEATURE_PLAY_STOP in feature_list: name = self.generate_service_name(FEATURE_PLAY_STOP) serv_play_stop = self.add_preload_service(SERV_SWITCH, CHAR_NAME) serv_play_stop.configure_char(CHAR_NAME, value=name) self.chars[FEATURE_PLAY_STOP] = serv_play_stop.configure_char( CHAR_ON, value=False, setter_callback=self.set_play_stop) if FEATURE_TOGGLE_MUTE in feature_list: name = self.generate_service_name(FEATURE_TOGGLE_MUTE) serv_toggle_mute = self.add_preload_service(SERV_SWITCH, CHAR_NAME) serv_toggle_mute.configure_char(CHAR_NAME, value=name) self.chars[FEATURE_TOGGLE_MUTE] = serv_toggle_mute.configure_char( CHAR_ON, value=False, setter_callback=self.set_toggle_mute) def generate_service_name(self, mode): """Generate name for individual service.""" return '{} {}'.format(self.display_name, MODE_FRIENDLY_NAME[mode]) def set_on_off(self, value): """Move switch state to value if call came from HomeKit.""" _LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value) self._flag[FEATURE_ON_OFF] = True service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} self.call_service(DOMAIN, service, params) def set_play_pause(self, value): """Move switch state to value if call came from HomeKit.""" _LOGGER.debug('%s: Set switch state for "play_pause" to %s', self.entity_id, value) self._flag[FEATURE_PLAY_PAUSE] = True service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_PAUSE params = {ATTR_ENTITY_ID: self.entity_id} self.call_service(DOMAIN, service, params) def set_play_stop(self, value): """Move switch state to value if call came from 
HomeKit.""" _LOGGER.debug('%s: Set switch state for "play_stop" to %s', self.entity_id, value) self._flag[FEATURE_PLAY_STOP] = True service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_STOP params = {ATTR_ENTITY_ID: self.entity_id} self.call_service(DOMAIN, service, params) def set_toggle_mute(self, value): """Move switch state to value if call came from HomeKit.""" _LOGGER.debug('%s: Set switch state for "toggle_mute" to %s', self.entity_id, value) self._flag[FEATURE_TOGGLE_MUTE] = True params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value} self.call_service(DOMAIN, SERVICE_VOLUME_MUTE, params) def update_state(self, new_state): """Update switch state after state changed.""" current_state = new_state.state if self.chars[FEATURE_ON_OFF]: hk_state = current_state not in (STATE_OFF, STATE_UNKNOWN, 'None') if not self._flag[FEATURE_ON_OFF]: _LOGGER.debug('%s: Set current state for "on_off" to %s', self.entity_id, hk_state) self.chars[FEATURE_ON_OFF].set_value(hk_state) self._flag[FEATURE_ON_OFF] = False if self.chars[FEATURE_PLAY_PAUSE]: hk_state = current_state == STATE_PLAYING if not self._flag[FEATURE_PLAY_PAUSE]: _LOGGER.debug('%s: Set current state for "play_pause" to %s', self.entity_id, hk_state) self.chars[FEATURE_PLAY_PAUSE].set_value(hk_state) self._flag[FEATURE_PLAY_PAUSE] = False if self.chars[FEATURE_PLAY_STOP]: hk_state = current_state == STATE_PLAYING if not self._flag[FEATURE_PLAY_STOP]: _LOGGER.debug('%s: Set current state for "play_stop" to %s', self.entity_id, hk_state) self.chars[FEATURE_PLAY_STOP].set_value(hk_state) self._flag[FEATURE_PLAY_STOP] = False if self.chars[FEATURE_TOGGLE_MUTE]: current_state = new_state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) if not self._flag[FEATURE_TOGGLE_MUTE]: _LOGGER.debug('%s: Set current state for "toggle_mute" to %s', self.entity_id, current_state) self.chars[FEATURE_TOGGLE_MUTE].set_value(current_state) self._flag[FEATURE_TOGGLE_MUTE] = False
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/homekit/type_media_players.py
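The four setter callbacks in the accessory above all follow the same pattern: flag the feature, pick a media_player service from the boolean, call it. The standalone sketch below restates that mapping in one place; `FEATURE_SERVICE_MAP` and `service_for` are illustrative names, not part of the component.

# Condensed illustration of the dispatch done by set_on_off / set_play_pause /
# set_play_stop / set_toggle_mute above (names here are made up for the sketch).
FEATURE_SERVICE_MAP = {
    FEATURE_ON_OFF: (SERVICE_TURN_ON, SERVICE_TURN_OFF),
    FEATURE_PLAY_PAUSE: (SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE),
    FEATURE_PLAY_STOP: (SERVICE_MEDIA_PLAY, SERVICE_MEDIA_STOP),
}


def service_for(feature, value):
    """Return the media_player service implied by a HomeKit switch value."""
    if feature == FEATURE_TOGGLE_MUTE:
        # Mute is not an on/off pair: the boolean is passed through as
        # ATTR_MEDIA_VOLUME_MUTED to a single SERVICE_VOLUME_MUTE call.
        return SERVICE_VOLUME_MUTE
    on_service, off_service = FEATURE_SERVICE_MAP[feature]
    return on_service if value else off_service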
""" Configure a switch using a digital output from a raspihats board. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.raspihats/ """ import logging import voluptuous as vol from homeassistant.components.raspihats import ( CONF_ADDRESS, CONF_BOARD, CONF_CHANNELS, CONF_I2C_HATS, CONF_INDEX, CONF_INITIAL_STATE, CONF_INVERT_LOGIC, I2C_HAT_NAMES, I2C_HATS_MANAGER, I2CHatsException) from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ['raspihats'] _CHANNELS_SCHEMA = vol.Schema([{ vol.Required(CONF_INDEX): cv.positive_int, vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_INVERT_LOGIC, default=False): cv.boolean, vol.Optional(CONF_INITIAL_STATE): cv.boolean, }]) _I2C_HATS_SCHEMA = vol.Schema([{ vol.Required(CONF_BOARD): vol.In(I2C_HAT_NAMES), vol.Required(CONF_ADDRESS): vol.Coerce(int), vol.Required(CONF_CHANNELS): _CHANNELS_SCHEMA, }]) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_I2C_HATS): _I2C_HATS_SCHEMA, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the raspihats switch devices.""" I2CHatSwitch.I2C_HATS_MANAGER = hass.data[I2C_HATS_MANAGER] switches = [] i2c_hat_configs = config.get(CONF_I2C_HATS) for i2c_hat_config in i2c_hat_configs: board = i2c_hat_config[CONF_BOARD] address = i2c_hat_config[CONF_ADDRESS] try: I2CHatSwitch.I2C_HATS_MANAGER.register_board(board, address) for channel_config in i2c_hat_config[CONF_CHANNELS]: switches.append( I2CHatSwitch( board, address, channel_config[CONF_INDEX], channel_config[CONF_NAME], channel_config[CONF_INVERT_LOGIC], channel_config.get(CONF_INITIAL_STATE) ) ) except I2CHatsException as ex: _LOGGER.error( "Failed to register %s I2CHat@%s %s", board, hex(address), str(ex)) add_entities(switches) class I2CHatSwitch(ToggleEntity): """Representation a switch that uses a I2C-HAT digital output.""" I2C_HATS_MANAGER = None def __init__(self, board, address, channel, name, invert_logic, initial_state): """Initialize switch.""" self._board = board self._address = address self._channel = channel self._name = name or DEVICE_DEFAULT_NAME self._invert_logic = invert_logic if initial_state is not None: if self._invert_logic: state = not initial_state else: state = initial_state self.I2C_HATS_MANAGER.write_dq( self._address, self._channel, state) def online_callback(): """Call fired when board is online.""" self.schedule_update_ha_state() self.I2C_HATS_MANAGER.register_online_callback( self._address, self._channel, online_callback) def _log_message(self, message): """Create log message.""" string = self._name + " " string += self._board + "I2CHat@" + hex(self._address) + " " string += "channel:" + str(self._channel) + message return string @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """Return the polling state.""" return False @property def is_on(self): """Return true if device is on.""" try: state = self.I2C_HATS_MANAGER.read_dq(self._address, self._channel) return state != self._invert_logic except I2CHatsException as ex: _LOGGER.error(self._log_message("Is ON check failed, " + str(ex))) return False def turn_on(self, **kwargs): """Turn the device on.""" try: state = self._invert_logic is False self.I2C_HATS_MANAGER.write_dq(self._address, 
self._channel, state) self.schedule_update_ha_state() except I2CHatsException as ex: _LOGGER.error(self._log_message("Turn ON failed, " + str(ex))) def turn_off(self, **kwargs): """Turn the device off.""" try: state = self._invert_logic is not False self.I2C_HATS_MANAGER.write_dq(self._address, self._channel, state) self.schedule_update_ha_state() except I2CHatsException as ex: _LOGGER.error( self._log_message("Turn OFF failed:, " + str(ex)))
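The invert_logic handling in the switch above reduces to an XOR between the requested or observed value and the configured inversion flag; the small standalone illustration below makes that explicit. The function names are made up for the sketch and are not part of the platform.

# Standalone illustration of the invert_logic behaviour used by turn_on,
# turn_off, is_on and the initial_state write above (names are made up).
def output_value_for(turn_on, invert_logic):
    """Return the raw DQ value written to the board for an on/off request."""
    return turn_on != invert_logic  # XOR: inverted channels drive low to switch on


def is_on_from(raw_state, invert_logic):
    """Interpret a raw DQ read-back as the switch's on/off state."""
    return raw_state != invert_logic


assert output_value_for(True, invert_logic=False) is True
assert output_value_for(True, invert_logic=True) is False
assert is_on_from(False, invert_logic=True) is True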
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/switch/raspihats.py
""" Support for monitoring the Transmission BitTorrent client API. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.transmission/ """ from datetime import timedelta import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_MONITORED_VARIABLES, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, STATE_IDLE) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from homeassistant.exceptions import PlatformNotReady REQUIREMENTS = ['transmissionrpc==0.11'] _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Transmission' DEFAULT_PORT = 9091 SENSOR_TYPES = { 'active_torrents': ['Active Torrents', None], 'current_status': ['Status', None], 'download_speed': ['Down Speed', 'MB/s'], 'paused_torrents': ['Paused Torrents', None], 'total_torrents': ['Total Torrents', None], 'upload_speed': ['Up Speed', 'MB/s'], } SCAN_INTERVAL = timedelta(minutes=2) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_MONITORED_VARIABLES, default=['torrents']): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_USERNAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Transmission sensors.""" import transmissionrpc from transmissionrpc.error import TransmissionError name = config.get(CONF_NAME) host = config.get(CONF_HOST) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) port = config.get(CONF_PORT) try: transmission = transmissionrpc.Client( host, port=port, user=username, password=password) transmission_api = TransmissionData(transmission) except TransmissionError as error: if str(error).find("401: Unauthorized"): _LOGGER.error("Credentials for Transmission client are not valid") return _LOGGER.warning( "Unable to connect to Transmission client: %s:%s", host, port) raise PlatformNotReady dev = [] for variable in config[CONF_MONITORED_VARIABLES]: dev.append(TransmissionSensor(variable, transmission_api, name)) add_entities(dev, True) class TransmissionSensor(Entity): """Representation of a Transmission sensor.""" def __init__(self, sensor_type, transmission_api, client_name): """Initialize the sensor.""" self._name = SENSOR_TYPES[sensor_type][0] self._state = None self._transmission_api = transmission_api self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._data = None self.client_name = client_name self.type = sensor_type @property def name(self): """Return the name of the sensor.""" return '{} {}'.format(self.client_name, self._name) @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def available(self): """Could the device be accessed during the last update call.""" return self._transmission_api.available def update(self): """Get the latest data from Transmission and updates the state.""" self._transmission_api.update() self._data = self._transmission_api.data if self.type == 'current_status': if self._data: upload = self._data.uploadSpeed download = self._data.downloadSpeed if upload > 0 and 
download > 0: self._state = 'Up/Down' elif upload > 0 and download == 0: self._state = 'Seeding' elif upload == 0 and download > 0: self._state = 'Downloading' else: self._state = STATE_IDLE else: self._state = None if self._data: if self.type == 'download_speed': mb_spd = float(self._data.downloadSpeed) mb_spd = mb_spd / 1024 / 1024 self._state = round(mb_spd, 2 if mb_spd < 0.1 else 1) elif self.type == 'upload_speed': mb_spd = float(self._data.uploadSpeed) mb_spd = mb_spd / 1024 / 1024 self._state = round(mb_spd, 2 if mb_spd < 0.1 else 1) elif self.type == 'active_torrents': self._state = self._data.activeTorrentCount elif self.type == 'paused_torrents': self._state = self._data.pausedTorrentCount elif self.type == 'total_torrents': self._state = self._data.torrentCount class TransmissionData: """Get the latest data and update the states.""" def __init__(self, api): """Initialize the Transmission data object.""" self.data = None self.available = True self._api = api @Throttle(SCAN_INTERVAL) def update(self): """Get the latest data from Transmission instance.""" from transmissionrpc.error import TransmissionError try: self.data = self._api.session_stats() self.available = True except TransmissionError: self.available = False _LOGGER.error("Unable to connect to Transmission client")
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/sensor/transmission.py
""" Support for Tibber. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.tibber/ """ import asyncio import logging from datetime import timedelta import aiohttp from homeassistant.components.tibber import DOMAIN as TIBBER_DOMAIN from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.entity import Entity from homeassistant.util import dt as dt_util from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ICON = 'mdi:currency-usd' ICON_RT = 'mdi:power-plug' SCAN_INTERVAL = timedelta(minutes=1) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Tibber sensor.""" if discovery_info is None: _LOGGER.error("Tibber sensor configuration has changed." " Check https://home-assistant.io/components/tibber/") return tibber_connection = hass.data.get(TIBBER_DOMAIN) dev = [] for home in tibber_connection.get_homes(): try: await home.update_info() except asyncio.TimeoutError as err: _LOGGER.error("Timeout connecting to Tibber home: %s ", err) raise PlatformNotReady() except aiohttp.ClientError as err: _LOGGER.error("Error connecting to Tibber home: %s ", err) raise PlatformNotReady() dev.append(TibberSensorElPrice(home)) if home.has_real_time_consumption: dev.append(TibberSensorRT(home)) async_add_entities(dev, False) class TibberSensorElPrice(Entity): """Representation of an Tibber sensor for el price.""" def __init__(self, tibber_home): """Initialize the sensor.""" self._tibber_home = tibber_home self._last_updated = None self._last_data_timestamp = None self._state = None self._is_available = False self._device_state_attributes = {} self._unit_of_measurement = self._tibber_home.price_unit self._name = 'Electricity price {}'.format(tibber_home.info['viewer'] ['home']['appNickname']) async def async_update(self): """Get the latest data and updates the states.""" now = dt_util.now() if self._tibber_home.current_price_total and self._last_updated and \ self._last_updated.hour == now.hour and self._last_data_timestamp: return if (not self._last_data_timestamp or (self._last_data_timestamp - now).total_seconds()/3600 < 12 or not self._is_available): _LOGGER.debug("Asking for new data.") await self._fetch_data() self._is_available = self._update_current_price() @property def device_state_attributes(self): """Return the state attributes.""" return self._device_state_attributes @property def available(self): """Return True if entity is available.""" return self._is_available @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def icon(self): """Return the icon to use in the frontend.""" return ICON @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" return self._unit_of_measurement @property def unique_id(self): """Return a unique ID.""" home = self._tibber_home.info['viewer']['home'] return home['meteringPointData']['consumptionEan'] @Throttle(MIN_TIME_BETWEEN_UPDATES) async def _fetch_data(self): try: await self._tibber_home.update_info() await self._tibber_home.update_price_info() except (asyncio.TimeoutError, aiohttp.ClientError): return data = self._tibber_home.info['viewer']['home'] self._device_state_attributes['app_nickname'] = data['appNickname'] self._device_state_attributes['grid_company'] = \ 
data['meteringPointData']['gridCompany'] self._device_state_attributes['estimated_annual_consumption'] = \ data['meteringPointData']['estimatedAnnualConsumption'] def _update_current_price(self): state = None max_price = 0 min_price = 10000 sum_price = 0 num = 0 now = dt_util.now() for key, price_total in self._tibber_home.price_total.items(): price_time = dt_util.as_local(dt_util.parse_datetime(key)) price_total = round(price_total, 3) time_diff = (now - price_time).total_seconds()/60 if (not self._last_data_timestamp or price_time > self._last_data_timestamp): self._last_data_timestamp = price_time if 0 <= time_diff < 60: state = price_total self._last_updated = price_time if now.date() == price_time.date(): max_price = max(max_price, price_total) min_price = min(min_price, price_total) num += 1 sum_price += price_total self._state = state self._device_state_attributes['max_price'] = max_price self._device_state_attributes['avg_price'] = round(sum_price / num, 3) self._device_state_attributes['min_price'] = min_price return state is not None class TibberSensorRT(Entity): """Representation of an Tibber sensor for real time consumption.""" def __init__(self, tibber_home): """Initialize the sensor.""" self._tibber_home = tibber_home self._state = None self._device_state_attributes = {} self._unit_of_measurement = 'W' nickname = tibber_home.info['viewer']['home']['appNickname'] self._name = 'Real time consumption {}'.format(nickname) async def async_added_to_hass(self): """Start unavailability tracking.""" await self._tibber_home.rt_subscribe(self.hass.loop, self._async_callback) async def _async_callback(self, payload): """Handle received data.""" errors = payload.get('errors') if errors: _LOGGER.error(errors[0]) return data = payload.get('data') if data is None: return live_measurement = data.get('liveMeasurement') if live_measurement is None: return self._state = live_measurement.pop('power', None) for key, value in live_measurement.items(): if value is None: continue self._device_state_attributes[key] = value self.async_schedule_update_ha_state() @property def device_state_attributes(self): """Return the state attributes.""" return self._device_state_attributes @property def available(self): """Return True if entity is available.""" return self._tibber_home.rt_subscription_running @property def name(self): """Return the name of the sensor.""" return self._name @property def should_poll(self): """Return the polling state.""" return False @property def state(self): """Return the state of the device.""" return self._state @property def icon(self): """Return the icon to use in the frontend.""" return ICON_RT @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" return self._unit_of_measurement @property def unique_id(self): """Return a unique ID.""" home = self._tibber_home.info['viewer']['home'] _id = home['meteringPointData']['consumptionEan'] return'{}_rt_consumption'.format(_id)
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/sensor/tibber.py
""" Support for interfacing with Monoprice Blackbird 4k 8x8 HDBaseT Matrix. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.blackbird """ import logging import socket import voluptuous as vol from homeassistant.components.media_player import ( DOMAIN, MEDIA_PLAYER_SCHEMA, PLATFORM_SCHEMA, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, MediaPlayerDevice) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, CONF_PORT, CONF_TYPE, STATE_OFF, STATE_ON) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pyblackbird==0.5'] _LOGGER = logging.getLogger(__name__) SUPPORT_BLACKBIRD = SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE ZONE_SCHEMA = vol.Schema({ vol.Required(CONF_NAME): cv.string, }) SOURCE_SCHEMA = vol.Schema({ vol.Required(CONF_NAME): cv.string, }) CONF_ZONES = 'zones' CONF_SOURCES = 'sources' DATA_BLACKBIRD = 'blackbird' SERVICE_SETALLZONES = 'blackbird_set_all_zones' ATTR_SOURCE = 'source' BLACKBIRD_SETALLZONES_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({ vol.Required(ATTR_SOURCE): cv.string }) # Valid zone ids: 1-8 ZONE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8)) # Valid source ids: 1-8 SOURCE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8)) PLATFORM_SCHEMA = vol.All( cv.has_at_least_one_key(CONF_PORT, CONF_HOST), PLATFORM_SCHEMA.extend({ vol.Exclusive(CONF_PORT, CONF_TYPE): cv.string, vol.Exclusive(CONF_HOST, CONF_TYPE): cv.string, vol.Required(CONF_ZONES): vol.Schema({ZONE_IDS: ZONE_SCHEMA}), vol.Required(CONF_SOURCES): vol.Schema({SOURCE_IDS: SOURCE_SCHEMA}), })) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Monoprice Blackbird 4k 8x8 HDBaseT Matrix platform.""" if DATA_BLACKBIRD not in hass.data: hass.data[DATA_BLACKBIRD] = {} port = config.get(CONF_PORT) host = config.get(CONF_HOST) from pyblackbird import get_blackbird from serial import SerialException connection = None if port is not None: try: blackbird = get_blackbird(port) connection = port except SerialException: _LOGGER.error("Error connecting to the Blackbird controller") return if host is not None: try: blackbird = get_blackbird(host, False) connection = host except socket.timeout: _LOGGER.error("Error connecting to the Blackbird controller") return sources = {source_id: extra[CONF_NAME] for source_id, extra in config[CONF_SOURCES].items()} devices = [] for zone_id, extra in config[CONF_ZONES].items(): _LOGGER.info("Adding zone %d - %s", zone_id, extra[CONF_NAME]) unique_id = "{}-{}".format(connection, zone_id) device = BlackbirdZone(blackbird, sources, zone_id, extra[CONF_NAME]) hass.data[DATA_BLACKBIRD][unique_id] = device devices.append(device) add_entities(devices, True) def service_handle(service): """Handle for services.""" entity_ids = service.data.get(ATTR_ENTITY_ID) source = service.data.get(ATTR_SOURCE) if entity_ids: devices = [device for device in hass.data[DATA_BLACKBIRD].values() if device.entity_id in entity_ids] else: devices = hass.data[DATA_BLACKBIRD].values() for device in devices: if service.service == SERVICE_SETALLZONES: device.set_all_zones(source) hass.services.register(DOMAIN, SERVICE_SETALLZONES, service_handle, schema=BLACKBIRD_SETALLZONES_SCHEMA) class BlackbirdZone(MediaPlayerDevice): """Representation of a Blackbird matrix zone.""" def __init__(self, blackbird, sources, zone_id, zone_name): """Initialize new zone.""" self._blackbird = blackbird # dict source_id -> source name self._source_id_name = sources 
# dict source name -> source_id self._source_name_id = {v: k for k, v in sources.items()} # ordered list of all source names self._source_names = sorted(self._source_name_id.keys(), key=lambda v: self._source_name_id[v]) self._zone_id = zone_id self._name = zone_name self._state = None self._source = None def update(self): """Retrieve latest state.""" state = self._blackbird.zone_status(self._zone_id) if not state: return self._state = STATE_ON if state.power else STATE_OFF idx = state.av if idx in self._source_id_name: self._source = self._source_id_name[idx] else: self._source = None @property def name(self): """Return the name of the zone.""" return self._name @property def state(self): """Return the state of the zone.""" return self._state @property def supported_features(self): """Return flag of media commands that are supported.""" return SUPPORT_BLACKBIRD @property def media_title(self): """Return the current source as media title.""" return self._source @property def source(self): """Return the current input source of the device.""" return self._source @property def source_list(self): """List of available input sources.""" return self._source_names def set_all_zones(self, source): """Set all zones to one source.""" if source not in self._source_name_id: return idx = self._source_name_id[source] _LOGGER.debug("Setting all zones source to %s", idx) self._blackbird.set_all_zone_source(idx) def select_source(self, source): """Set input source.""" if source not in self._source_name_id: return idx = self._source_name_id[source] _LOGGER.debug("Setting zone %d source to %s", self._zone_id, idx) self._blackbird.set_zone_source(self._zone_id, idx) def turn_on(self): """Turn the media player on.""" _LOGGER.debug("Turning zone %d on", self._zone_id) self._blackbird.set_zone_power(self._zone_id, True) def turn_off(self): """Turn the media player off.""" _LOGGER.debug("Turning zone %d off", self._zone_id) self._blackbird.set_zone_power(self._zone_id, False)
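The BlackbirdZone class above keeps three views of the configured sources: an id-to-name dict, the inverse name-to-id dict, and a name list ordered by source id, which is what select_source() and set_all_zones() use to translate a human-readable source name back into a matrix input number. The following is a minimal standalone sketch of that bookkeeping, assuming nothing beyond plain Python; FakeBlackbird is a hypothetical stub standing in for the real pyblackbird client and only records calls instead of talking to hardware.

# Minimal standalone sketch of the source bookkeeping used by BlackbirdZone.
# FakeBlackbird is a hypothetical stand-in for the pyblackbird client.

class FakeBlackbird:
    """Records the last zone/source call instead of talking to hardware."""

    def __init__(self):
        self.calls = []

    def set_zone_source(self, zone_id, source_id):
        self.calls.append(('zone', zone_id, source_id))

    def set_all_zone_source(self, source_id):
        self.calls.append(('all', source_id))


def build_source_maps(sources):
    """Return (id->name, name->id, ordered names) like BlackbirdZone.__init__."""
    source_id_name = dict(sources)
    source_name_id = {name: idx for idx, name in sources.items()}
    source_names = sorted(source_name_id, key=lambda name: source_name_id[name])
    return source_id_name, source_name_id, source_names


if __name__ == '__main__':
    sources = {1: 'Apple TV', 3: 'Blu-ray', 2: 'Chromecast'}
    id_name, name_id, names = build_source_maps(sources)
    assert names == ['Apple TV', 'Chromecast', 'Blu-ray']  # ordered by source id

    blackbird = FakeBlackbird()
    # select_source() resolves the readable name back to the matrix input id.
    blackbird.set_zone_source(4, name_id['Blu-ray'])
    assert blackbird.calls == [('zone', 4, 3)]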
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
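The template fan tests above drive the fan's state through a value_template that turns an input_boolean state into 'on' or 'off'. The sketch below renders the same template shape with plain Jinja2 to show how that mapping behaves in isolation; the local is_state() helper is only a stand-in for Home Assistant's template function and looks states up in a plain dict, so this is an illustrative approximation rather than the real template engine wiring.

# A minimal sketch of the value_template used in _register_components,
# rendered with plain Jinja2. is_state() here is a hypothetical stand-in
# for Home Assistant's template helper.

from jinja2 import Environment

VALUE_TEMPLATE = """
{% if is_state('input_boolean.state', 'on') %}
  {{ 'on' }}
{% else %}
  {{ 'off' }}
{% endif %}
"""


def render(states):
    env = Environment()
    env.globals['is_state'] = (
        lambda entity_id, value: states.get(entity_id) == value)
    return env.from_string(VALUE_TEMPLATE).render().strip()


if __name__ == '__main__':
    assert render({'input_boolean.state': 'on'}) == 'on'
    assert render({'input_boolean.state': 'off'}) == 'off'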
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/media_player/blackbird.py
""" Support for Tellstick sensors. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.tellstick/ """ import logging from collections import namedtuple import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import TEMP_CELSIUS from homeassistant.helpers.entity import Entity import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['tellstick'] _LOGGER = logging.getLogger(__name__) DatatypeDescription = namedtuple('DatatypeDescription', ['name', 'unit']) CONF_DATATYPE_MASK = 'datatype_mask' CONF_ONLY_NAMED = 'only_named' CONF_TEMPERATURE_SCALE = 'temperature_scale' DEFAULT_DATATYPE_MASK = 127 DEFAULT_ONLY_NAMED = False DEFAULT_TEMPERATURE_SCALE = TEMP_CELSIUS PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_ONLY_NAMED, default=DEFAULT_ONLY_NAMED): cv.boolean, vol.Optional(CONF_TEMPERATURE_SCALE, default=DEFAULT_TEMPERATURE_SCALE): cv.string, vol.Optional(CONF_DATATYPE_MASK, default=DEFAULT_DATATYPE_MASK): cv.positive_int, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Tellstick sensors.""" from tellcore import telldus import tellcore.constants as tellcore_constants sensor_value_descriptions = { tellcore_constants.TELLSTICK_TEMPERATURE: DatatypeDescription('temperature', config.get(CONF_TEMPERATURE_SCALE)), tellcore_constants.TELLSTICK_HUMIDITY: DatatypeDescription('humidity', '%'), tellcore_constants.TELLSTICK_RAINRATE: DatatypeDescription('rain rate', ''), tellcore_constants.TELLSTICK_RAINTOTAL: DatatypeDescription('rain total', ''), tellcore_constants.TELLSTICK_WINDDIRECTION: DatatypeDescription('wind direction', ''), tellcore_constants.TELLSTICK_WINDAVERAGE: DatatypeDescription('wind average', ''), tellcore_constants.TELLSTICK_WINDGUST: DatatypeDescription('wind gust', '') } try: tellcore_lib = telldus.TelldusCore() except OSError: _LOGGER.exception('Could not initialize Tellstick') return sensors = [] datatype_mask = config.get(CONF_DATATYPE_MASK) for tellcore_sensor in tellcore_lib.sensors(): try: sensor_name = config[tellcore_sensor.id] except KeyError: if config.get(CONF_ONLY_NAMED): continue sensor_name = str(tellcore_sensor.id) for datatype in sensor_value_descriptions: if datatype & datatype_mask: if tellcore_sensor.has_value(datatype): sensor_info = sensor_value_descriptions[datatype] sensors.append(TellstickSensor( sensor_name, tellcore_sensor, datatype, sensor_info)) add_entities(sensors) class TellstickSensor(Entity): """Representation of a Tellstick sensor.""" def __init__(self, name, tellcore_sensor, datatype, sensor_info): """Initialize the sensor.""" self._datatype = datatype self._tellcore_sensor = tellcore_sensor self._unit_of_measurement = sensor_info.unit or None self._value = None self._name = '{} {}'.format(name, sensor_info.name) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._value @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement def update(self): """Update tellstick sensor.""" self._value = self._tellcore_sensor.value(self._datatype).value
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/sensor/tellstick.py
""" This component provides HA sensor support for Ring Door Bell/Chimes. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.ring/ """ from datetime import timedelta import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.ring import ( CONF_ATTRIBUTION, DEFAULT_ENTITY_NAMESPACE, DATA_RING) from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS, STATE_UNKNOWN, ATTR_ATTRIBUTION) from homeassistant.helpers.entity import Entity from homeassistant.helpers.icon import icon_for_battery_level DEPENDENCIES = ['ring'] _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=30) # Sensor types: Name, category, units, icon, kind SENSOR_TYPES = { 'battery': [ 'Battery', ['doorbell', 'stickup_cams'], '%', 'battery-50', None], 'last_activity': [ 'Last Activity', ['doorbell', 'stickup_cams'], None, 'history', None], 'last_ding': [ 'Last Ding', ['doorbell'], None, 'history', 'ding'], 'last_motion': [ 'Last Motion', ['doorbell', 'stickup_cams'], None, 'history', 'motion'], 'volume': [ 'Volume', ['chime', 'doorbell', 'stickup_cams'], None, 'bell-ring', None], 'wifi_signal_category': [ 'WiFi Signal Category', ['chime', 'doorbell', 'stickup_cams'], None, 'wifi', None], 'wifi_signal_strength': [ 'WiFi Signal Strength', ['chime', 'doorbell', 'stickup_cams'], 'dBm', 'wifi', None], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE): cv.string, vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a sensor for a Ring device.""" ring = hass.data[DATA_RING] sensors = [] for device in ring.chimes: # ring.chimes is doing I/O for sensor_type in config[CONF_MONITORED_CONDITIONS]: if 'chime' in SENSOR_TYPES[sensor_type][1]: sensors.append(RingSensor(hass, device, sensor_type)) for device in ring.doorbells: # ring.doorbells is doing I/O for sensor_type in config[CONF_MONITORED_CONDITIONS]: if 'doorbell' in SENSOR_TYPES[sensor_type][1]: sensors.append(RingSensor(hass, device, sensor_type)) for device in ring.stickup_cams: # ring.stickup_cams is doing I/O for sensor_type in config[CONF_MONITORED_CONDITIONS]: if 'stickup_cams' in SENSOR_TYPES[sensor_type][1]: sensors.append(RingSensor(hass, device, sensor_type)) add_entities(sensors, True) return True class RingSensor(Entity): """A sensor implementation for Ring device.""" def __init__(self, hass, data, sensor_type): """Initialize a sensor for Ring device.""" super(RingSensor, self).__init__() self._sensor_type = sensor_type self._data = data self._extra = None self._icon = 'mdi:{}'.format(SENSOR_TYPES.get(self._sensor_type)[3]) self._kind = SENSOR_TYPES.get(self._sensor_type)[4] self._name = "{0} {1}".format( self._data.name, SENSOR_TYPES.get(self._sensor_type)[0]) self._state = STATE_UNKNOWN self._tz = str(hass.config.time_zone) self._unique_id = '{}-{}'.format(self._data.id, self._sensor_type) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_state_attributes(self): """Return the state attributes.""" attrs = {} 
attrs[ATTR_ATTRIBUTION] = CONF_ATTRIBUTION attrs['device_id'] = self._data.id attrs['firmware'] = self._data.firmware attrs['kind'] = self._data.kind attrs['timezone'] = self._data.timezone attrs['type'] = self._data.family attrs['wifi_name'] = self._data.wifi_name if self._extra and self._sensor_type.startswith('last_'): attrs['created_at'] = self._extra['created_at'] attrs['answered'] = self._extra['answered'] attrs['recording_status'] = self._extra['recording']['status'] attrs['category'] = self._extra['kind'] return attrs @property def icon(self): """Icon to use in the frontend, if any.""" if self._sensor_type == 'battery' and self._state is not STATE_UNKNOWN: return icon_for_battery_level(battery_level=int(self._state), charging=False) return self._icon @property def unit_of_measurement(self): """Return the units of measurement.""" return SENSOR_TYPES.get(self._sensor_type)[2] def update(self): """Get the latest data and updates the state.""" _LOGGER.debug("Pulling data from %s sensor", self._name) self._data.update() if self._sensor_type == 'volume': self._state = self._data.volume if self._sensor_type == 'battery': self._state = self._data.battery_life if self._sensor_type.startswith('last_'): history = self._data.history(limit=5, timezone=self._tz, kind=self._kind, enforce_limit=True) if history: self._extra = history[0] created_at = self._extra['created_at'] self._state = '{0:0>2}:{1:0>2}'.format( created_at.hour, created_at.minute) if self._sensor_type == 'wifi_signal_category': self._state = self._data.wifi_signal_category if self._sensor_type == 'wifi_signal_strength': self._state = self._data.wifi_signal_strength
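For the last_ding, last_motion and last_activity sensors above, update() takes the newest history entry and stores its created_at time as a zero-padded HH:MM string. The snippet below isolates just that formatting step; the datetimes are illustrative sample values, not real Ring history data.

# Sketch of the HH:MM state formatting used by the Ring last_* sensors.
# The datetimes below are illustrative sample data.

from datetime import datetime


def last_event_state(created_at):
    """Format a timestamp the same way the last_* sensors do."""
    return '{0:0>2}:{1:0>2}'.format(created_at.hour, created_at.minute)


if __name__ == '__main__':
    assert last_event_state(datetime(2018, 7, 1, 9, 5)) == '09:05'
    assert last_event_state(datetime(2018, 7, 1, 18, 42)) == '18:42'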
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/sensor/ring.py
""" A platform that to monitor Uptime Robot monitors. For more details about this platform, please refer to the documentation at https://www.home-assistant.io/components/binary_sensor.uptimerobot/ """ import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( PLATFORM_SCHEMA, BinarySensorDevice) from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pyuptimerobot==0.0.5'] _LOGGER = logging.getLogger(__name__) ATTR_TARGET = 'target' CONF_ATTRIBUTION = "Data provided by Uptime Robot" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Uptime Robot binary_sensors.""" from pyuptimerobot import UptimeRobot up_robot = UptimeRobot() api_key = config.get(CONF_API_KEY) monitors = up_robot.getMonitors(api_key) devices = [] if not monitors or monitors.get('stat') != 'ok': _LOGGER.error("Error connecting to Uptime Robot") return for monitor in monitors['monitors']: devices.append(UptimeRobotBinarySensor( api_key, up_robot, monitor['id'], monitor['friendly_name'], monitor['url'])) add_entities(devices, True) class UptimeRobotBinarySensor(BinarySensorDevice): """Representation of a Uptime Robot binary sensor.""" def __init__(self, api_key, up_robot, monitor_id, name, target): """Initialize Uptime Robot the binary sensor.""" self._api_key = api_key self._monitor_id = str(monitor_id) self._name = name self._target = target self._up_robot = up_robot self._state = None @property def name(self): """Return the name of the binary sensor.""" return self._name @property def is_on(self): """Return the state of the binary sensor.""" return self._state @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return 'connectivity' @property def device_state_attributes(self): """Return the state attributes of the binary sensor.""" return { ATTR_ATTRIBUTION: CONF_ATTRIBUTION, ATTR_TARGET: self._target, } def update(self): """Get the latest state of the binary sensor.""" monitor = self._up_robot.getMonitors(self._api_key, self._monitor_id) if not monitor or monitor.get('stat') != 'ok': _LOGGER.warning("Failed to get new state") return status = monitor['monitors'][0]['status'] self._state = 1 if status == 2 else 0
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/binary_sensor/uptimerobot.py
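A minimal, self-contained sketch of the status handling in UptimeRobotBinarySensor.update() above: the StubUptimeRobot client and the monitor_is_up helper are hypothetical, but the response shape ({'stat': 'ok', 'monitors': [{'status': ...}]}) and the status == 2 check mirror what the platform code expects from pyuptimerobot.

class StubUptimeRobot:
    """Hypothetical stand-in for pyuptimerobot.UptimeRobot."""

    def __init__(self, status):
        self._status = status

    def getMonitors(self, api_key, monitor_id=None):
        # Same response shape update() reads for a single monitor.
        return {'stat': 'ok', 'monitors': [{'status': self._status}]}


def monitor_is_up(client, api_key, monitor_id):
    """Return True/False for the monitor, or None if the call failed."""
    monitor = client.getMonitors(api_key, monitor_id)
    if not monitor or monitor.get('stat') != 'ok':
        return None  # update() keeps the previous state in this case
    return monitor['monitors'][0]['status'] == 2  # status 2 is treated as up


print(monitor_is_up(StubUptimeRobot(2), 'api-key', '12345'))  # True
print(monitor_is_up(StubUptimeRobot(9), 'api-key', '12345'))  # False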
""" Support for IKEA Tradfri. For more details about this component, please refer to the documentation at https://home-assistant.io/components/ikea_tradfri/ """ import logging import voluptuous as vol from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP import homeassistant.helpers.config_validation as cv from homeassistant.util.json import load_json from .const import ( CONF_IMPORT_GROUPS, CONF_IDENTITY, CONF_HOST, CONF_KEY, CONF_GATEWAY_ID) from . import config_flow # noqa pylint_disable=unused-import REQUIREMENTS = ['pytradfri[async]==6.0.1'] DOMAIN = 'tradfri' CONFIG_FILE = '.tradfri_psk.conf' KEY_GATEWAY = 'tradfri_gateway' KEY_API = 'tradfri_api' CONF_ALLOW_TRADFRI_GROUPS = 'allow_tradfri_groups' DEFAULT_ALLOW_TRADFRI_GROUPS = False CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_ALLOW_TRADFRI_GROUPS, default=DEFAULT_ALLOW_TRADFRI_GROUPS): cv.boolean, }) }, extra=vol.ALLOW_EXTRA) _LOGGER = logging.getLogger(__name__) async def async_setup(hass, config): """Set up the Tradfri component.""" conf = config.get(DOMAIN) if conf is None: return True configured_hosts = [entry.data['host'] for entry in hass.config_entries.async_entries(DOMAIN)] legacy_hosts = await hass.async_add_executor_job( load_json, hass.config.path(CONFIG_FILE)) for host, info in legacy_hosts.items(): if host in configured_hosts: continue info[CONF_HOST] = host info[CONF_IMPORT_GROUPS] = conf[CONF_ALLOW_TRADFRI_GROUPS] hass.async_create_task(hass.config_entries.flow.async_init( DOMAIN, context={'source': config_entries.SOURCE_IMPORT}, data=info )) host = conf.get(CONF_HOST) import_groups = conf[CONF_ALLOW_TRADFRI_GROUPS] if host is None or host in configured_hosts or host in legacy_hosts: return True hass.async_create_task(hass.config_entries.flow.async_init( DOMAIN, context={'source': config_entries.SOURCE_IMPORT}, data={CONF_HOST: host, CONF_IMPORT_GROUPS: import_groups} )) return True async def async_setup_entry(hass, entry): """Create a gateway.""" # host, identity, key, allow_tradfri_groups from pytradfri import Gateway, RequestError # pylint: disable=import-error from pytradfri.api.aiocoap_api import APIFactory factory = APIFactory( entry.data[CONF_HOST], psk_id=entry.data[CONF_IDENTITY], psk=entry.data[CONF_KEY], loop=hass.loop ) async def on_hass_stop(event): """Close connection when hass stops.""" await factory.shutdown() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) api = factory.request gateway = Gateway() try: gateway_info = await api(gateway.get_gateway_info()) except RequestError: _LOGGER.error("Tradfri setup failed.") return False hass.data.setdefault(KEY_API, {})[entry.entry_id] = api hass.data.setdefault(KEY_GATEWAY, {})[entry.entry_id] = gateway dev_reg = await hass.helpers.device_registry.async_get_registry() dev_reg.async_get_or_create( config_entry_id=entry.entry_id, connections=set(), identifiers={ (DOMAIN, entry.data[CONF_GATEWAY_ID]) }, manufacturer='IKEA', name='Gateway', # They just have 1 gateway model. Type is not exposed yet. model='E1526', sw_version=gateway_info.firmware_version, ) hass.async_create_task(hass.config_entries.async_forward_entry_setup( entry, 'light' )) hass.async_create_task(hass.config_entries.async_forward_entry_setup( entry, 'sensor' )) hass.async_create_task(hass.config_entries.async_forward_entry_setup( entry, 'switch' )) return True
"""The tests for the Template fan platform.""" import logging import pytest from homeassistant import setup from homeassistant.const import STATE_ON, STATE_OFF from homeassistant.components.fan import ( ATTR_SPEED, ATTR_OSCILLATING, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, ATTR_DIRECTION, DIRECTION_FORWARD, DIRECTION_REVERSE) from tests.common import ( async_mock_service, assert_setup_component) from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = 'fan.test_fan' # Represent for fan's state _STATE_INPUT_BOOLEAN = 'input_boolean.state' # Represent for fan's speed _SPEED_INPUT_SELECT = 'input_select.speed' # Represent for fan's oscillating _OSC_INPUT = 'input_select.osc' # Represent for fan's direction _DIRECTION_INPUT_SELECT = 'input_select.direction' @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'turn_on': { 'service': 'script.fan_on' } } } }) await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): """Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 
'test_fan': { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, 'True') hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'on' }}", 'speed_template': "{{ 'medium' }}", 'oscillating_template': "{{ 1 == 1 }}", 'direction_template': "{{ 'forward' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, 'fan'): assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': { 'value_template': "{{ 'abc' }}", 'speed_template': "{{ '0' }}", 'oscillating_template': "{{ 'xyz' }}", 'direction_template': "{{ 'right' }}", 'turn_on': { 'service': 'script.fan_on' }, 'turn_off': { 'service': 'script.fan_off' } } } } }) await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan common.async_turn_off(hass, _TEST_FAN) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, 
SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to high common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' common.async_set_speed(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ['1', '2', '3']) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's speed to '1' common.async_set_speed(hass, _TEST_FAN, '1') await hass.async_block_till_done() # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) # Set fan's speed to 'medium' which is invalid common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) await hass.async_block_till_done() # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == '1' _verify(hass, STATE_ON, '1', None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, False) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'False' _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to 'invalid' common.async_oscillate(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's osc to True common.async_oscillate(hass, _TEST_FAN, True) await hass.async_block_till_done() # verify assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False common.async_oscillate(hass, _TEST_FAN, None) await hass.async_block_till_done() # verify osc is unchanged 
assert hass.states.get(_OSC_INPUT).state == 'True' _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state \ == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == '' _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan common.async_turn_on(hass, _TEST_FAN) await hass.async_block_till_done() # Set fan's direction to forward common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) await hass.async_block_till_done() # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' common.async_set_direction(hass, _TEST_FAN, 'invalid') await hass.async_block_till_done() # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == \ DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify(hass, expected_state, expected_speed, expected_oscillating, expected_direction): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED, None) == expected_speed assert attributes.get(ATTR_OSCILLATING, None) == expected_oscillating assert attributes.get(ATTR_DIRECTION, None) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, 'input_boolean'): assert await setup.async_setup_component( hass, 'input_boolean', {'input_boolean': {'state': None}} ) with assert_setup_component(3, 'input_select'): assert await setup.async_setup_component(hass, 'input_select', { 'input_select': { 'speed': { 'name': 'Speed', 'options': ['', SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, '1', '2', '3'] }, 'osc': { 'name': 'oscillating', 'options': ['', 'True', 'False'] }, 'direction': { 'name': 'Direction', 'options': ['', DIRECTION_FORWARD, DIRECTION_REVERSE] }, } }) with assert_setup_component(1, 'fan'): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { 'value_template': value_template, 'speed_template': "{{ states('input_select.speed') }}", 'oscillating_template': "{{ states('input_select.osc') }}", 
'direction_template': "{{ states('input_select.direction') }}", 'turn_on': { 'service': 'input_boolean.turn_on', 'entity_id': _STATE_INPUT_BOOLEAN }, 'turn_off': { 'service': 'input_boolean.turn_off', 'entity_id': _STATE_INPUT_BOOLEAN }, 'set_speed': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _SPEED_INPUT_SELECT, 'option': '{{ speed }}' } }, 'set_oscillating': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _OSC_INPUT, 'option': '{{ oscillating }}' } }, 'set_direction': { 'service': 'input_select.select_option', 'data_template': { 'entity_id': _DIRECTION_INPUT_SELECT, 'option': '{{ direction }}' } } } if speed_list: test_fan_config['speeds'] = speed_list assert await setup.async_setup_component(hass, 'fan', { 'fan': { 'platform': 'template', 'fans': { 'test_fan': test_fan_config } } }) await hass.async_start() await hass.async_block_till_done()
tinloaf/home-assistant
tests/components/fan/test_template.py
homeassistant/components/tradfri/__init__.py
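async_setup in the Tradfri component above only starts an import flow for hosts that are not already known, either from existing config entries or from the legacy PSK file. The standalone sketch below reproduces that guard as a plain function; hosts_to_import and the example addresses are hypothetical and exist only to illustrate the decision.

def hosts_to_import(yaml_host, configured_hosts, legacy_hosts):
    """Return the hosts async_setup would hand to an import flow."""
    pending = [host for host in legacy_hosts if host not in configured_hosts]
    if (yaml_host is not None
            and yaml_host not in configured_hosts
            and yaml_host not in legacy_hosts):
        pending.append(yaml_host)
    return pending


# One legacy host is already configured, the YAML host is new.
print(hosts_to_import(
    yaml_host='192.168.1.20',
    configured_hosts=['192.168.1.10'],
    legacy_hosts={'192.168.1.10': {}, '192.168.1.11': {}},
))  # -> ['192.168.1.11', '192.168.1.20']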
import datetime import warnings import operator import numpy as np from pandas._libs import (lib, index as libindex, tslib as libts, algos as libalgos, join as libjoin, Timestamp, Timedelta, ) from pandas._libs.lib import is_datetime_array from pandas.compat import range, u from pandas.compat.numpy import function as nv from pandas import compat from pandas.core.dtypes.generic import ( ABCSeries, ABCMultiIndex, ABCPeriodIndex, ABCDateOffset) from pandas.core.dtypes.missing import isna, array_equivalent from pandas.core.dtypes.common import ( _ensure_int64, _ensure_object, _ensure_categorical, _ensure_platform_int, is_integer, is_float, is_dtype_equal, is_object_dtype, is_categorical_dtype, is_interval_dtype, is_bool, is_bool_dtype, is_signed_integer_dtype, is_unsigned_integer_dtype, is_integer_dtype, is_float_dtype, is_datetime64_any_dtype, is_timedelta64_dtype, needs_i8_conversion, is_iterator, is_list_like, is_scalar) from pandas.core.common import (is_bool_indexer, _values_from_object, _asarray_tuplesafe) from pandas.core.base import PandasObject, IndexOpsMixin import pandas.core.base as base from pandas.util._decorators import ( Appender, Substitution, cache_readonly, deprecate_kwarg) from pandas.core.indexes.frozen import FrozenList import pandas.core.common as com import pandas.core.dtypes.concat as _concat import pandas.core.missing as missing import pandas.core.algorithms as algos import pandas.core.sorting as sorting from pandas.io.formats.printing import pprint_thing from pandas.core.ops import _comp_method_OBJECT_ARRAY from pandas.core.strings import StringAccessorMixin from pandas.core.config import get_option # simplify default_pprint = lambda x, max_seq_items=None: \ pprint_thing(x, escape_chars=('\t', '\r', '\n'), quote_strings=True, max_seq_items=max_seq_items) __all__ = ['Index'] _unsortable_types = frozenset(('mixed', 'mixed-integer')) _index_doc_kwargs = dict(klass='Index', inplace='', target_klass='Index', unique='Index', duplicated='np.ndarray') _index_shared_docs = dict() def _try_get_item(x): try: return x.item() except AttributeError: return x class InvalidIndexError(Exception): pass _o_dtype = np.dtype(object) _Identity = object def _new_Index(cls, d): """ This is called upon unpickling, rather than the default which doesn't have arguments and breaks __new__ """ # required for backward compat, because PI can't be instantiated with # ordinals through __new__ GH #13277 if issubclass(cls, ABCPeriodIndex): from pandas.core.indexes.period import _new_PeriodIndex return _new_PeriodIndex(cls, **d) return cls.__new__(cls, **d) class Index(IndexOpsMixin, StringAccessorMixin, PandasObject): """ Immutable ndarray implementing an ordered, sliceable set. 
The basic object storing axis labels for all pandas objects Parameters ---------- data : array-like (1-dimensional) dtype : NumPy dtype (default: object) copy : bool Make a copy of input ndarray name : object Name to be stored in the index tupleize_cols : bool (default: True) When True, attempt to create a MultiIndex if possible Notes ----- An Index instance can **only** contain hashable objects """ # To hand over control to subclasses _join_precedence = 1 # Cython methods _arrmap = libalgos.arrmap_object _left_indexer_unique = libjoin.left_join_indexer_unique_object _left_indexer = libjoin.left_join_indexer_object _inner_indexer = libjoin.inner_join_indexer_object _outer_indexer = libjoin.outer_join_indexer_object _box_scalars = False _typ = 'index' _data = None _id = None name = None asi8 = None _comparables = ['name'] _attributes = ['name'] _allow_index_ops = True _allow_datetime_index_ops = False _allow_period_index_ops = False _is_numeric_dtype = False _can_hold_na = True # would we like our indexing holder to defer to us _defer_to_indexing = False # prioritize current class for _shallow_copy_with_infer, # used to infer integers as datetime-likes _infer_as_myclass = False _engine_type = libindex.ObjectEngine def __new__(cls, data=None, dtype=None, copy=False, name=None, fastpath=False, tupleize_cols=True, **kwargs): if name is None and hasattr(data, 'name'): name = data.name if fastpath: return cls._simple_new(data, name) from .range import RangeIndex # range if isinstance(data, RangeIndex): return RangeIndex(start=data, copy=copy, dtype=dtype, name=name) elif isinstance(data, range): return RangeIndex.from_range(data, copy=copy, dtype=dtype, name=name) # categorical if is_categorical_dtype(data) or is_categorical_dtype(dtype): from .category import CategoricalIndex return CategoricalIndex(data, copy=copy, name=name, **kwargs) # interval if is_interval_dtype(data): from .interval import IntervalIndex return IntervalIndex.from_intervals(data, name=name, copy=copy) # index-like elif isinstance(data, (np.ndarray, Index, ABCSeries)): if (is_datetime64_any_dtype(data) or (dtype is not None and is_datetime64_any_dtype(dtype)) or 'tz' in kwargs): from pandas.core.indexes.datetimes import DatetimeIndex result = DatetimeIndex(data, copy=copy, name=name, dtype=dtype, **kwargs) if dtype is not None and is_dtype_equal(_o_dtype, dtype): return Index(result.to_pydatetime(), dtype=_o_dtype) else: return result elif (is_timedelta64_dtype(data) or (dtype is not None and is_timedelta64_dtype(dtype))): from pandas.core.indexes.timedeltas import TimedeltaIndex result = TimedeltaIndex(data, copy=copy, name=name, **kwargs) if dtype is not None and _o_dtype == dtype: return Index(result.to_pytimedelta(), dtype=_o_dtype) else: return result if dtype is not None: try: # we need to avoid having numpy coerce # things that look like ints/floats to ints unless # they are actually ints, e.g. '0' and 0.0 # should not be coerced # GH 11836 if is_integer_dtype(dtype): inferred = lib.infer_dtype(data) if inferred == 'integer': data = np.array(data, copy=copy, dtype=dtype) elif inferred in ['floating', 'mixed-integer-float']: if isna(data).any(): raise ValueError('cannot convert float ' 'NaN to integer') # If we are actually all equal to integers, # then coerce to integer. try: return cls._try_convert_to_int_index( data, copy, name) except ValueError: pass # Return an actual float index. 
from .numeric import Float64Index return Float64Index(data, copy=copy, dtype=dtype, name=name) elif inferred == 'string': pass else: data = data.astype(dtype) elif is_float_dtype(dtype): inferred = lib.infer_dtype(data) if inferred == 'string': pass else: data = data.astype(dtype) else: data = np.array(data, dtype=dtype, copy=copy) except (TypeError, ValueError) as e: msg = str(e) if 'cannot convert float' in msg: raise # maybe coerce to a sub-class from pandas.core.indexes.period import ( PeriodIndex, IncompatibleFrequency) if isinstance(data, PeriodIndex): return PeriodIndex(data, copy=copy, name=name, **kwargs) if is_signed_integer_dtype(data.dtype): from .numeric import Int64Index return Int64Index(data, copy=copy, dtype=dtype, name=name) elif is_unsigned_integer_dtype(data.dtype): from .numeric import UInt64Index return UInt64Index(data, copy=copy, dtype=dtype, name=name) elif is_float_dtype(data.dtype): from .numeric import Float64Index return Float64Index(data, copy=copy, dtype=dtype, name=name) elif issubclass(data.dtype.type, np.bool) or is_bool_dtype(data): subarr = data.astype('object') else: subarr = _asarray_tuplesafe(data, dtype=object) # _asarray_tuplesafe does not always copy underlying data, # so need to make sure that this happens if copy: subarr = subarr.copy() if dtype is None: inferred = lib.infer_dtype(subarr) if inferred == 'integer': try: return cls._try_convert_to_int_index( subarr, copy, name) except ValueError: pass return Index(subarr, copy=copy, dtype=object, name=name) elif inferred in ['floating', 'mixed-integer-float']: from .numeric import Float64Index return Float64Index(subarr, copy=copy, name=name) elif inferred == 'interval': from .interval import IntervalIndex return IntervalIndex.from_intervals(subarr, name=name, copy=copy) elif inferred == 'boolean': # don't support boolean explicity ATM pass elif inferred != 'string': if inferred.startswith('datetime'): if (lib.is_datetime_with_singletz_array(subarr) or 'tz' in kwargs): # only when subarr has the same tz from pandas.core.indexes.datetimes import ( DatetimeIndex) try: return DatetimeIndex(subarr, copy=copy, name=name, **kwargs) except libts.OutOfBoundsDatetime: pass elif inferred.startswith('timedelta'): from pandas.core.indexes.timedeltas import ( TimedeltaIndex) return TimedeltaIndex(subarr, copy=copy, name=name, **kwargs) elif inferred == 'period': try: return PeriodIndex(subarr, name=name, **kwargs) except IncompatibleFrequency: pass return cls._simple_new(subarr, name) elif hasattr(data, '__array__'): return Index(np.asarray(data), dtype=dtype, copy=copy, name=name, **kwargs) elif data is None or is_scalar(data): cls._scalar_data_error(data) else: if (tupleize_cols and isinstance(data, list) and data and isinstance(data[0], tuple)): # we must be all tuples, otherwise don't construct # 10697 if all(isinstance(e, tuple) for e in data): try: # must be orderable in py3 if compat.PY3: sorted(data) from .multi import MultiIndex return MultiIndex.from_tuples( data, names=name or kwargs.get('names')) except (TypeError, KeyError): # python2 - MultiIndex fails on mixed types pass # other iterable of some kind subarr = _asarray_tuplesafe(data, dtype=object) return Index(subarr, dtype=dtype, copy=copy, name=name, **kwargs) """ NOTE for new Index creation: - _simple_new: It returns new Index with the same type as the caller. All metadata (such as name) must be provided by caller's responsibility. Using _shallow_copy is recommended because it fills these metadata otherwise specified. 
- _shallow_copy: It returns new Index with the same type (using _simple_new), but fills caller's metadata otherwise specified. Passed kwargs will overwrite corresponding metadata. - _shallow_copy_with_infer: It returns new Index inferring its type from passed values. It fills caller's metadata otherwise specified as the same as _shallow_copy. See each method's docstring. """ @classmethod def _simple_new(cls, values, name=None, dtype=None, **kwargs): """ we require the we have a dtype compat for the values if we are passed a non-dtype compat, then coerce using the constructor Must be careful not to recurse. """ if not hasattr(values, 'dtype'): if values is None and dtype is not None: values = np.empty(0, dtype=dtype) else: values = np.array(values, copy=False) if is_object_dtype(values): values = cls(values, name=name, dtype=dtype, **kwargs)._values result = object.__new__(cls) result._data = values result.name = name for k, v in compat.iteritems(kwargs): setattr(result, k, v) return result._reset_identity() _index_shared_docs['_shallow_copy'] = """ create a new Index with the same class as the caller, don't copy the data, use the same object attributes with passed in attributes taking precedence *this is an internal non-public method* Parameters ---------- values : the values to create the new Index, optional kwargs : updates the default attributes for this Index """ @Appender(_index_shared_docs['_shallow_copy']) def _shallow_copy(self, values=None, **kwargs): if values is None: values = self.values attributes = self._get_attributes_dict() attributes.update(kwargs) return self._simple_new(values, **attributes) def _shallow_copy_with_infer(self, values=None, **kwargs): """ create a new Index inferring the class with passed value, don't copy the data, use the same object attributes with passed in attributes taking precedence *this is an internal non-public method* Parameters ---------- values : the values to create the new Index, optional kwargs : updates the default attributes for this Index """ if values is None: values = self.values attributes = self._get_attributes_dict() attributes.update(kwargs) attributes['copy'] = False if self._infer_as_myclass: try: return self._constructor(values, **attributes) except (TypeError, ValueError): pass return Index(values, **attributes) def _deepcopy_if_needed(self, orig, copy=False): """ .. versionadded:: 0.19.0 Make a copy of self if data coincides (in memory) with orig. Subclasses should override this if self._base is not an ndarray. Parameters ---------- orig : ndarray other ndarray to compare self._data against copy : boolean, default False when False, do not run any check, just return self Returns ------- A copy of self if needed, otherwise self : Index """ if copy: # Retrieve the "base objects", i.e. 
the original memory allocations orig = orig if orig.base is None else orig.base new = self._data if self._data.base is None else self._data.base if orig is new: return self.copy(deep=True) return self def _update_inplace(self, result, **kwargs): # guard when called from IndexOpsMixin raise TypeError("Index can't be updated inplace") def _sort_levels_monotonic(self): """ compat with MultiIndex """ return self _index_shared_docs['_get_grouper_for_level'] = """ Get index grouper corresponding to an index level Parameters ---------- mapper: Group mapping function or None Function mapping index values to groups level : int or None Index level Returns ------- grouper : Index Index of values to group on labels : ndarray of int or None Array of locations in level_index uniques : Index or None Index of unique values for level """ @Appender(_index_shared_docs['_get_grouper_for_level']) def _get_grouper_for_level(self, mapper, level=None): assert level is None or level == 0 if mapper is None: grouper = self else: grouper = self.map(mapper) return grouper, None, None def is_(self, other): """ More flexible, faster check like ``is`` but that works through views Note: this is *not* the same as ``Index.identical()``, which checks that metadata is also the same. Parameters ---------- other : object other object to compare against. Returns ------- True if both have same underlying data, False otherwise : bool """ # use something other than None to be clearer return self._id is getattr( other, '_id', Ellipsis) and self._id is not None def _reset_identity(self): """Initializes or resets ``_id`` attribute with new object""" self._id = _Identity() return self # ndarray compat def __len__(self): """ return the length of the Index """ return len(self._data) def __array__(self, dtype=None): """ the array interface, return my values """ return self._data.view(np.ndarray) def __array_wrap__(self, result, context=None): """ Gets called after a ufunc """ if is_bool_dtype(result): return result attrs = self._get_attributes_dict() attrs = self._maybe_update_attributes(attrs) return Index(result, **attrs) @cache_readonly def dtype(self): """ return the dtype object of the underlying data """ return self._data.dtype @cache_readonly def dtype_str(self): """ return the dtype str of the underlying data """ return str(self.dtype) @property def values(self): """ return the underlying data as an ndarray """ return self._data.view(np.ndarray) def get_values(self): """ return the underlying data as an ndarray """ return self.values @Appender(IndexOpsMixin.memory_usage.__doc__) def memory_usage(self, deep=False): result = super(Index, self).memory_usage(deep=deep) # include our engine hashtable result += self._engine.sizeof(deep=deep) return result # ops compat def tolist(self): """ return a list of the Index values """ return list(self.values) @deprecate_kwarg(old_arg_name='n', new_arg_name='repeats') def repeat(self, repeats, *args, **kwargs): """ Repeat elements of an Index. Refer to `numpy.ndarray.repeat` for more information about the `repeats` argument. See also -------- numpy.ndarray.repeat """ nv.validate_repeat(args, kwargs) return self._shallow_copy(self._values.repeat(repeats)) _index_shared_docs['where'] = """ .. versionadded:: 0.19.0 Return an Index of same shape as self and whose corresponding entries are from self where cond is True and otherwise are from other. 
Parameters ---------- cond : boolean array-like with the same length as self other : scalar, or array-like """ @Appender(_index_shared_docs['where']) def where(self, cond, other=None): if other is None: other = self._na_value dtype = self.dtype values = self.values if is_bool(other) or is_bool_dtype(other): # bools force casting values = values.astype(object) dtype = None values = np.where(cond, values, other) if self._is_numeric_dtype and np.any(isna(values)): # We can't coerce to the numeric dtype of "self" (unless # it's float) if there are NaN values in our output. dtype = None return self._shallow_copy_with_infer(values, dtype=dtype) def ravel(self, order='C'): """ return an ndarray of the flattened values of the underlying data See also -------- numpy.ndarray.ravel """ return self._values.ravel(order=order) # construction helpers @classmethod def _try_convert_to_int_index(cls, data, copy, name): """ Attempt to convert an array of data into an integer index. Parameters ---------- data : The data to convert. copy : Whether to copy the data or not. name : The name of the index returned. Returns ------- int_index : data converted to either an Int64Index or a UInt64Index Raises ------ ValueError if the conversion was not successful. """ from .numeric import Int64Index, UInt64Index try: res = data.astype('i8', copy=False) if (res == data).all(): return Int64Index(res, copy=copy, name=name) except (OverflowError, TypeError, ValueError): pass # Conversion to int64 failed (possibly due to # overflow), so let's try now with uint64. try: res = data.astype('u8', copy=False) if (res == data).all(): return UInt64Index(res, copy=copy, name=name) except (OverflowError, TypeError, ValueError): pass raise ValueError @classmethod def _scalar_data_error(cls, data): raise TypeError('{0}(...) must be called with a collection of some ' 'kind, {1} was passed'.format(cls.__name__, repr(data))) @classmethod def _string_data_error(cls, data): raise TypeError('String dtype not supported, you may need ' 'to explicitly cast to a numeric type') @classmethod def _coerce_to_ndarray(cls, data): """coerces data to ndarray, raises on scalar data. Converts other iterables to list first and then to array. Does not touch ndarrays. """ if not isinstance(data, (np.ndarray, Index)): if data is None or is_scalar(data): cls._scalar_data_error(data) # other iterable of some kind if not isinstance(data, (ABCSeries, list, tuple)): data = list(data) data = np.asarray(data) return data def _get_attributes_dict(self): """ return an attributes dict for my class """ return dict([(k, getattr(self, k, None)) for k in self._attributes]) def view(self, cls=None): # we need to see if we are subclassing an # index type here if cls is not None and not hasattr(cls, '_typ'): result = self._data.view(cls) else: result = self._shallow_copy() if isinstance(result, Index): result._id = self._id return result def _coerce_scalar_to_index(self, item): """ we need to coerce a scalar to a compat for our index type Parameters ---------- item : scalar item to coerce """ dtype = self.dtype if self._is_numeric_dtype and isna(item): # We can't coerce to the numeric dtype of "self" (unless # it's float) if there are NaN values in our output. dtype = None return Index([item], dtype=dtype, **self._get_attributes_dict()) _index_shared_docs['copy'] = """ Make a copy of this object. Name and dtype sets those attributes on the new object. 
Parameters ---------- name : string, optional deep : boolean, default False dtype : numpy dtype or pandas type Returns ------- copy : Index Notes ----- In most cases, there should be no functional difference from using ``deep``, but if ``deep`` is passed it will attempt to deepcopy. """ @Appender(_index_shared_docs['copy']) def copy(self, name=None, deep=False, dtype=None, **kwargs): if deep: new_index = self._shallow_copy(self._data.copy()) else: new_index = self._shallow_copy() names = kwargs.get('names') names = self._validate_names(name=name, names=names, deep=deep) new_index = new_index.set_names(names) if dtype: new_index = new_index.astype(dtype) return new_index def __copy__(self, **kwargs): return self.copy(**kwargs) def __deepcopy__(self, memo=None): if memo is None: memo = {} return self.copy(deep=True) def _validate_names(self, name=None, names=None, deep=False): """ Handles the quirks of having a singular 'name' parameter for general Index and plural 'names' parameter for MultiIndex. """ from copy import deepcopy if names is not None and name is not None: raise TypeError("Can only provide one of `names` and `name`") elif names is None and name is None: return deepcopy(self.names) if deep else self.names elif names is not None: if not is_list_like(names): raise TypeError("Must pass list-like as `names`.") return names else: if not is_list_like(name): return [name] return name def __unicode__(self): """ Return a string representation for this object. Invoked by unicode(df) in py2 only. Yields a Unicode String in both py2/py3. """ klass = self.__class__.__name__ data = self._format_data() attrs = self._format_attrs() space = self._format_space() prepr = (u(",%s") % space).join([u("%s=%s") % (k, v) for k, v in attrs]) # no data provided, just attributes if data is None: data = '' res = u("%s(%s%s)") % (klass, data, prepr) return res def _format_space(self): # using space here controls if the attributes # are line separated or not (the default) # max_seq_items = get_option('display.max_seq_items') # if len(self) > max_seq_items: # space = "\n%s" % (' ' * (len(klass) + 1)) return " " @property def _formatter_func(self): """ Return the formatted data as a unicode string """ return default_pprint def _format_data(self): """ Return the formatted data as a unicode string """ from pandas.io.formats.console import get_console_size from pandas.io.formats.format import _get_adjustment display_width, _ = get_console_size() if display_width is None: display_width = get_option('display.width') or 80 space1 = "\n%s" % (' ' * (len(self.__class__.__name__) + 1)) space2 = "\n%s" % (' ' * (len(self.__class__.__name__) + 2)) n = len(self) sep = ',' max_seq_items = get_option('display.max_seq_items') or n formatter = self._formatter_func # do we want to justify (only do so for non-objects) is_justify = not (self.inferred_type in ('string', 'unicode') or (self.inferred_type == 'categorical' and is_object_dtype(self.categories))) # are we a truncated display is_truncated = n > max_seq_items # adj can optionaly handle unicode eastern asian width adj = _get_adjustment() def _extend_line(s, line, value, display_width, next_line_prefix): if (adj.len(line.rstrip()) + adj.len(value.rstrip()) >= display_width): s += line.rstrip() line = next_line_prefix line += value return s, line def best_len(values): if values: return max([adj.len(x) for x in values]) else: return 0 if n == 0: summary = '[], ' elif n == 1: first = formatter(self[0]) summary = '[%s], ' % first elif n == 2: first = formatter(self[0]) last = 
formatter(self[-1]) summary = '[%s, %s], ' % (first, last) else: if n > max_seq_items: n = min(max_seq_items // 2, 10) head = [formatter(x) for x in self[:n]] tail = [formatter(x) for x in self[-n:]] else: head = [] tail = [formatter(x) for x in self] # adjust all values to max length if needed if is_justify: # however, if we are not truncated and we are only a single # line, then don't justify if (is_truncated or not (len(', '.join(head)) < display_width and len(', '.join(tail)) < display_width)): max_len = max(best_len(head), best_len(tail)) head = [x.rjust(max_len) for x in head] tail = [x.rjust(max_len) for x in tail] summary = "" line = space2 for i in range(len(head)): word = head[i] + sep + ' ' summary, line = _extend_line(summary, line, word, display_width, space2) if is_truncated: # remove trailing space of last line summary += line.rstrip() + space2 + '...' line = space2 for i in range(len(tail) - 1): word = tail[i] + sep + ' ' summary, line = _extend_line(summary, line, word, display_width, space2) # last value: no sep added + 1 space of width used for trailing ',' summary, line = _extend_line(summary, line, tail[-1], display_width - 2, space2) summary += line summary += '],' if len(summary) > (display_width): summary += space1 else: # one row summary += ' ' # remove initial space summary = '[' + summary[len(space2):] return summary def _format_attrs(self): """ Return a list of tuples of the (attr,formatted_value) """ attrs = [] attrs.append(('dtype', "'%s'" % self.dtype)) if self.name is not None: attrs.append(('name', default_pprint(self.name))) max_seq_items = get_option('display.max_seq_items') or len(self) if len(self) > max_seq_items: attrs.append(('length', len(self))) return attrs def to_series(self, **kwargs): """ Create a Series with both index and values equal to the index keys useful with map for returning an indexer based on an index Returns ------- Series : dtype will be based on the type of the Index values. """ from pandas import Series return Series(self._to_embed(), index=self._shallow_copy(), name=self.name) def _to_embed(self, keep_tz=False): """ *this is an internal non-public method* return an array repr of this object, potentially casting to object """ return self.values.copy() _index_shared_docs['astype'] = """ Create an Index with values cast to dtypes. The class of a new Index is determined by dtype. When conversion is impossible, a ValueError exception is raised. Parameters ---------- dtype : numpy dtype or pandas type copy : bool, default True By default, astype always returns a newly allocated object. If copy is set to False and internal requirements on dtype are satisfied, the original data is used to create a new Index or the original Index is returned. .. versionadded:: 0.19.0 """ @Appender(_index_shared_docs['astype']) def astype(self, dtype, copy=True): return Index(self.values.astype(dtype, copy=copy), name=self.name, dtype=dtype) def _to_safe_for_reshape(self): """ convert to object if we are a categorical """ return self def to_datetime(self, dayfirst=False): """ DEPRECATED: use :meth:`pandas.to_datetime` instead. For an Index containing strings or datetime.datetime objects, attempt conversion to DatetimeIndex """ warnings.warn("to_datetime is deprecated. 
Use pd.to_datetime(...)", FutureWarning, stacklevel=2) from pandas.core.indexes.datetimes import DatetimeIndex if self.inferred_type == 'string': from dateutil.parser import parse parser = lambda x: parse(x, dayfirst=dayfirst) parsed = lib.try_parse_dates(self.values, parser=parser) return DatetimeIndex(parsed) else: return DatetimeIndex(self.values) def _assert_can_do_setop(self, other): if not is_list_like(other): raise TypeError('Input must be Index or array-like') return True def _convert_can_do_setop(self, other): if not isinstance(other, Index): other = Index(other, name=self.name) result_name = self.name else: result_name = self.name if self.name == other.name else None return other, result_name def _convert_for_op(self, value): """ Convert value to be insertable to ndarray """ return value def _assert_can_do_op(self, value): """ Check value is valid for scalar op """ if not lib.isscalar(value): msg = "'value' must be a scalar, passed: {0}" raise TypeError(msg.format(type(value).__name__)) @property def nlevels(self): return 1 def _get_names(self): return FrozenList((self.name, )) def _set_names(self, values, level=None): if len(values) != 1: raise ValueError('Length of new names must be 1, got %d' % len(values)) self.name = values[0] names = property(fset=_set_names, fget=_get_names) def set_names(self, names, level=None, inplace=False): """ Set new names on index. Defaults to returning new index. Parameters ---------- names : str or sequence name(s) to set level : int, level name, or sequence of int/level names (default None) If the index is a MultiIndex (hierarchical), level(s) to set (None for all levels). Otherwise level must be None inplace : bool if True, mutates in place Returns ------- new index (of same type and class...etc) [if inplace, returns None] Examples -------- >>> Index([1, 2, 3, 4]).set_names('foo') Int64Index([1, 2, 3, 4], dtype='int64') >>> Index([1, 2, 3, 4]).set_names(['foo']) Int64Index([1, 2, 3, 4], dtype='int64') >>> idx = MultiIndex.from_tuples([(1, u'one'), (1, u'two'), (2, u'one'), (2, u'two')], names=['foo', 'bar']) >>> idx.set_names(['baz', 'quz']) MultiIndex(levels=[[1, 2], [u'one', u'two']], labels=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[u'baz', u'quz']) >>> idx.set_names('baz', level=0) MultiIndex(levels=[[1, 2], [u'one', u'two']], labels=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[u'baz', u'bar']) """ if level is not None and self.nlevels == 1: raise ValueError('Level must be None for non-MultiIndex') if level is not None and not is_list_like(level) and is_list_like( names): raise TypeError("Names must be a string") if not is_list_like(names) and level is None and self.nlevels > 1: raise TypeError("Must pass list-like as `names`.") if not is_list_like(names): names = [names] if level is not None and not is_list_like(level): level = [level] if inplace: idx = self else: idx = self._shallow_copy() idx._set_names(names, level=level) if not inplace: return idx def rename(self, name, inplace=False): """ Set new names on index. Defaults to returning new index. Parameters ---------- name : str or list name to set inplace : bool if True, mutates in place Returns ------- new index (of same type and class...etc) [if inplace, returns None] """ return self.set_names([name], inplace=inplace) def reshape(self, *args, **kwargs): """ NOT IMPLEMENTED: do not call this method, as reshaping is not supported for Index objects and will raise an error. Reshape an Index. 
""" raise NotImplementedError("reshaping is not supported " "for Index objects") @property def _has_complex_internals(self): # to disable groupby tricks in MultiIndex return False def summary(self, name=None): if len(self) > 0: head = self[0] if (hasattr(head, 'format') and not isinstance(head, compat.string_types)): head = head.format() tail = self[-1] if (hasattr(tail, 'format') and not isinstance(tail, compat.string_types)): tail = tail.format() index_summary = ', %s to %s' % (pprint_thing(head), pprint_thing(tail)) else: index_summary = '' if name is None: name = type(self).__name__ return '%s: %s entries%s' % (name, len(self), index_summary) def _mpl_repr(self): # how to represent ourselves to matplotlib return self.values _na_value = np.nan """The expected NA value to use with this index.""" # introspection @property def is_monotonic(self): """ alias for is_monotonic_increasing (deprecated) """ return self.is_monotonic_increasing @property def is_monotonic_increasing(self): """ return if the index is monotonic increasing (only equal or increasing) values. Examples -------- >>> Index([1, 2, 3]).is_monotonic_increasing True >>> Index([1, 2, 2]).is_monotonic_increasing True >>> Index([1, 3, 2]).is_monotonic_increasing False """ return self._engine.is_monotonic_increasing @property def is_monotonic_decreasing(self): """ return if the index is monotonic decreasing (only equal or decreasing) values. Examples -------- >>> Index([3, 2, 1]).is_monotonic_decreasing True >>> Index([3, 2, 2]).is_monotonic_decreasing True >>> Index([3, 1, 2]).is_monotonic_decreasing False """ return self._engine.is_monotonic_decreasing @property def _is_strictly_monotonic_increasing(self): """return if the index is strictly monotonic increasing (only increasing) values Examples -------- >>> Index([1, 2, 3])._is_strictly_monotonic_increasing True >>> Index([1, 2, 2])._is_strictly_monotonic_increasing False >>> Index([1, 3, 2])._is_strictly_monotonic_increasing False """ return self.is_unique and self.is_monotonic_increasing @property def _is_strictly_monotonic_decreasing(self): """return if the index is strictly monotonic decreasing (only decreasing) values Examples -------- >>> Index([3, 2, 1])._is_strictly_monotonic_decreasing True >>> Index([3, 2, 2])._is_strictly_monotonic_decreasing False >>> Index([3, 1, 2])._is_strictly_monotonic_decreasing False """ return self.is_unique and self.is_monotonic_decreasing def is_lexsorted_for_tuple(self, tup): return True @cache_readonly(allow_setting=True) def is_unique(self): """ return if the index has unique values """ return self._engine.is_unique @property def has_duplicates(self): return not self.is_unique def is_boolean(self): return self.inferred_type in ['boolean'] def is_integer(self): return self.inferred_type in ['integer'] def is_floating(self): return self.inferred_type in ['floating', 'mixed-integer-float'] def is_numeric(self): return self.inferred_type in ['integer', 'floating'] def is_object(self): return is_object_dtype(self.dtype) def is_categorical(self): return self.inferred_type in ['categorical'] def is_interval(self): return self.inferred_type in ['interval'] def is_mixed(self): return self.inferred_type in ['mixed'] def holds_integer(self): return self.inferred_type in ['integer', 'mixed-integer'] _index_shared_docs['_convert_scalar_indexer'] = """ Convert a scalar indexer. 
Parameters ---------- key : label of the slice bound kind : {'ix', 'loc', 'getitem', 'iloc'} or None """ @Appender(_index_shared_docs['_convert_scalar_indexer']) def _convert_scalar_indexer(self, key, kind=None): assert kind in ['ix', 'loc', 'getitem', 'iloc', None] if kind == 'iloc': return self._validate_indexer('positional', key, kind) if len(self) and not isinstance(self, ABCMultiIndex,): # we can raise here if we are definitive that this # is positional indexing (eg. .ix on with a float) # or label indexing if we are using a type able # to be represented in the index if kind in ['getitem', 'ix'] and is_float(key): if not self.is_floating(): return self._invalid_indexer('label', key) elif kind in ['loc'] and is_float(key): # we want to raise KeyError on string/mixed here # technically we *could* raise a TypeError # on anything but mixed though if self.inferred_type not in ['floating', 'mixed-integer-float', 'string', 'unicode', 'mixed']: return self._invalid_indexer('label', key) elif kind in ['loc'] and is_integer(key): if not self.holds_integer(): return self._invalid_indexer('label', key) return key _index_shared_docs['_convert_slice_indexer'] = """ Convert a slice indexer. By definition, these are labels unless 'iloc' is passed in. Floats are not allowed as the start, step, or stop of the slice. Parameters ---------- key : label of the slice bound kind : {'ix', 'loc', 'getitem', 'iloc'} or None """ @Appender(_index_shared_docs['_convert_slice_indexer']) def _convert_slice_indexer(self, key, kind=None): assert kind in ['ix', 'loc', 'getitem', 'iloc', None] # if we are not a slice, then we are done if not isinstance(key, slice): return key # validate iloc if kind == 'iloc': return slice(self._validate_indexer('slice', key.start, kind), self._validate_indexer('slice', key.stop, kind), self._validate_indexer('slice', key.step, kind)) # potentially cast the bounds to integers start, stop, step = key.start, key.stop, key.step # figure out if this is a positional indexer def is_int(v): return v is None or is_integer(v) is_null_slicer = start is None and stop is None is_index_slice = is_int(start) and is_int(stop) is_positional = is_index_slice and not self.is_integer() if kind == 'getitem': """ called from the getitem slicers, validate that we are in fact integers """ if self.is_integer() or is_index_slice: return slice(self._validate_indexer('slice', key.start, kind), self._validate_indexer('slice', key.stop, kind), self._validate_indexer('slice', key.step, kind)) # convert the slice to an indexer here # if we are mixed and have integers try: if is_positional and self.is_mixed(): # TODO: i, j are not used anywhere if start is not None: i = self.get_loc(start) # noqa if stop is not None: j = self.get_loc(stop) # noqa is_positional = False except KeyError: if self.inferred_type == 'mixed-integer-float': raise if is_null_slicer: indexer = key elif is_positional: indexer = key else: try: indexer = self.slice_indexer(start, stop, step, kind=kind) except Exception: if is_index_slice: if self.is_integer(): raise else: indexer = key else: raise return indexer def _convert_listlike_indexer(self, keyarr, kind=None): """ Parameters ---------- keyarr : list-like Indexer to convert. 
Returns ------- tuple (indexer, keyarr) indexer is an ndarray or None if cannot convert keyarr are tuple-safe keys """ if isinstance(keyarr, Index): keyarr = self._convert_index_indexer(keyarr) else: keyarr = self._convert_arr_indexer(keyarr) indexer = self._convert_list_indexer(keyarr, kind=kind) return indexer, keyarr _index_shared_docs['_convert_arr_indexer'] = """ Convert an array-like indexer to the appropriate dtype. Parameters ---------- keyarr : array-like Indexer to convert. Returns ------- converted_keyarr : array-like """ @Appender(_index_shared_docs['_convert_arr_indexer']) def _convert_arr_indexer(self, keyarr): keyarr = _asarray_tuplesafe(keyarr) return keyarr _index_shared_docs['_convert_index_indexer'] = """ Convert an Index indexer to the appropriate dtype. Parameters ---------- keyarr : Index (or sub-class) Indexer to convert. Returns ------- converted_keyarr : Index (or sub-class) """ @Appender(_index_shared_docs['_convert_index_indexer']) def _convert_index_indexer(self, keyarr): return keyarr _index_shared_docs['_convert_list_indexer'] = """ Convert a list-like indexer to the appropriate dtype. Parameters ---------- keyarr : Index (or sub-class) Indexer to convert. kind : iloc, ix, loc, optional Returns ------- positional indexer or None """ @Appender(_index_shared_docs['_convert_list_indexer']) def _convert_list_indexer(self, keyarr, kind=None): if (kind in [None, 'iloc', 'ix'] and is_integer_dtype(keyarr) and not self.is_floating() and not isinstance(keyarr, ABCPeriodIndex)): if self.inferred_type == 'mixed-integer': indexer = self.get_indexer(keyarr) if (indexer >= 0).all(): return indexer # missing values are flagged as -1 by get_indexer and negative # indices are already converted to positive indices in the # above if-statement, so the negative flags are changed to # values outside the range of indices so as to trigger an # IndexError in maybe_convert_indices indexer[indexer < 0] = len(self) from pandas.core.indexing import maybe_convert_indices return maybe_convert_indices(indexer, len(self)) elif not self.inferred_type == 'integer': keyarr = np.where(keyarr < 0, len(self) + keyarr, keyarr) return keyarr return None def _invalid_indexer(self, form, key): """ consistent invalid indexer message """ raise TypeError("cannot do {form} indexing on {klass} with these " "indexers [{key}] of {kind}".format( form=form, klass=type(self), key=key, kind=type(key))) def get_duplicates(self): from collections import defaultdict counter = defaultdict(lambda: 0) for k in self.values: counter[k] += 1 return sorted(k for k, v in compat.iteritems(counter) if v > 1) _get_duplicates = get_duplicates def _cleanup(self): self._engine.clear_mapping() @cache_readonly def _constructor(self): return type(self) @cache_readonly def _engine(self): # property, for now, slow to look up return self._engine_type(lambda: self._values, len(self)) def _validate_index_level(self, level): """ Validate index level. For single-level Index getting level number is a no-op, but some verification must be done like in MultiIndex. 
""" if isinstance(level, int): if level < 0 and level != -1: raise IndexError("Too many levels: Index has only 1 level," " %d is not a valid level number" % (level, )) elif level > 0: raise IndexError("Too many levels:" " Index has only 1 level, not %d" % (level + 1)) elif level != self.name: raise KeyError('Level %s must be same as name (%s)' % (level, self.name)) def _get_level_number(self, level): self._validate_index_level(level) return 0 @cache_readonly def inferred_type(self): """ return a string of the type inferred from the values """ return lib.infer_dtype(self) def _is_memory_usage_qualified(self): """ return a boolean if we need a qualified .info display """ return self.is_object() def is_type_compatible(self, kind): return kind == self.inferred_type @cache_readonly def is_all_dates(self): if self._data is None: return False return is_datetime_array(_ensure_object(self.values)) def __iter__(self): return iter(self.values) def __reduce__(self): d = dict(data=self._data) d.update(self._get_attributes_dict()) return _new_Index, (self.__class__, d), None def __setstate__(self, state): """Necessary for making this object picklable""" if isinstance(state, dict): self._data = state.pop('data') for k, v in compat.iteritems(state): setattr(self, k, v) elif isinstance(state, tuple): if len(state) == 2: nd_state, own_state = state data = np.empty(nd_state[1], dtype=nd_state[2]) np.ndarray.__setstate__(data, nd_state) self.name = own_state[0] else: # pragma: no cover data = np.empty(state) np.ndarray.__setstate__(data, state) self._data = data self._reset_identity() else: raise Exception("invalid pickle state") _unpickle_compat = __setstate__ def __nonzero__(self): raise ValueError("The truth value of a {0} is ambiguous. " "Use a.empty, a.bool(), a.item(), a.any() or a.all()." .format(self.__class__.__name__)) __bool__ = __nonzero__ _index_shared_docs['__contains__'] = """ return a boolean if this key is IN the index Parameters ---------- key : object Returns ------- boolean """ @Appender(_index_shared_docs['__contains__'] % _index_doc_kwargs) def __contains__(self, key): hash(key) try: return key in self._engine except (OverflowError, TypeError, ValueError): return False _index_shared_docs['contains'] = """ return a boolean if this key is IN the index Parameters ---------- key : object Returns ------- boolean """ @Appender(_index_shared_docs['contains'] % _index_doc_kwargs) def contains(self, key): hash(key) try: return key in self._engine except (TypeError, ValueError): return False def __hash__(self): raise TypeError("unhashable type: %r" % type(self).__name__) def __setitem__(self, key, value): raise TypeError("Index does not support mutable operations") def __getitem__(self, key): """ Override numpy.ndarray's __getitem__ method to work as desired. This function adds lists and Series as valid boolean indexers (ndarrays only supports ndarray with dtype=bool). If resulting ndim != 1, plain ndarray is returned instead of corresponding `Index` subclass. """ # There's no custom logic to be implemented in __getslice__, so it's # not overloaded intentionally. getitem = self._data.__getitem__ promote = self._shallow_copy if is_scalar(key): return getitem(key) if isinstance(key, slice): # This case is separated from the conditional above to avoid # pessimization of basic indexing. 
return promote(getitem(key)) if is_bool_indexer(key): key = np.asarray(key) key = _values_from_object(key) result = getitem(key) if not is_scalar(result): return promote(result) else: return result def append(self, other): """ Append a collection of Index options together Parameters ---------- other : Index or list/tuple of indices Returns ------- appended : Index """ to_concat = [self] if isinstance(other, (list, tuple)): to_concat = to_concat + list(other) else: to_concat.append(other) for obj in to_concat: if not isinstance(obj, Index): raise TypeError('all inputs must be Index') names = set([obj.name for obj in to_concat]) name = None if len(names) > 1 else self.name if self.is_categorical(): # if calling index is category, don't check dtype of others from pandas.core.indexes.category import CategoricalIndex return CategoricalIndex._append_same_dtype(self, to_concat, name) typs = _concat.get_dtype_kinds(to_concat) if len(typs) == 1: return self._append_same_dtype(to_concat, name=name) return _concat._concat_index_asobject(to_concat, name=name) def _append_same_dtype(self, to_concat, name): """ Concatenate to_concat which has the same class """ # must be overrided in specific classes return _concat._concat_index_asobject(to_concat, name) _index_shared_docs['take'] = """ return a new %(klass)s of the values selected by the indices For internal compatibility with numpy arrays. Parameters ---------- indices : list Indices to be taken axis : int, optional The axis over which to select values, always 0. allow_fill : bool, default True fill_value : bool, default None If allow_fill=True and fill_value is not None, indices specified by -1 is regarded as NA. If Index doesn't hold NA, raise ValueError See also -------- numpy.ndarray.take """ @Appender(_index_shared_docs['take'] % _index_doc_kwargs) def take(self, indices, axis=0, allow_fill=True, fill_value=None, **kwargs): if kwargs: nv.validate_take(tuple(), kwargs) indices = _ensure_platform_int(indices) if self._can_hold_na: taken = self._assert_take_fillable(self.values, indices, allow_fill=allow_fill, fill_value=fill_value, na_value=self._na_value) else: if allow_fill and fill_value is not None: msg = 'Unable to fill values because {0} cannot contain NA' raise ValueError(msg.format(self.__class__.__name__)) taken = self.values.take(indices) return self._shallow_copy(taken) def _assert_take_fillable(self, values, indices, allow_fill=True, fill_value=None, na_value=np.nan): """ Internal method to handle NA filling of take """ indices = _ensure_platform_int(indices) # only fill if we are passing a non-None fill_value if allow_fill and fill_value is not None: if (indices < -1).any(): msg = ('When allow_fill=True and fill_value is not None, ' 'all indices must be >= -1') raise ValueError(msg) taken = values.take(indices) mask = indices == -1 if mask.any(): taken[mask] = na_value else: taken = values.take(indices) return taken @cache_readonly def _isnan(self): """ return if each value is nan""" if self._can_hold_na: return isna(self) else: # shouldn't reach to this condition by checking hasnans beforehand values = np.empty(len(self), dtype=np.bool_) values.fill(False) return values @cache_readonly def _nan_idxs(self): if self._can_hold_na: w, = self._isnan.nonzero() return w else: return np.array([], dtype=np.int64) @cache_readonly def hasnans(self): """ return if I have any nans; enables various perf speedups """ if self._can_hold_na: return self._isnan.any() else: return False def isna(self): """ Detect missing values .. 
versionadded:: 0.20.0 Returns ------- a boolean array of whether my values are NA See also -------- isnull : alias of isna pandas.isna : top-level isna """ return self._isnan isnull = isna def notna(self): """ Inverse of isna .. versionadded:: 0.20.0 Returns ------- a boolean array of whether my values are not NA See also -------- notnull : alias of notna pandas.notna : top-level notna """ return ~self.isna() notnull = notna def putmask(self, mask, value): """ return a new Index of the values set with the mask See also -------- numpy.ndarray.putmask """ values = self.values.copy() try: np.putmask(values, mask, self._convert_for_op(value)) return self._shallow_copy(values) except (ValueError, TypeError): # coerces to object return self.astype(object).putmask(mask, value) def format(self, name=False, formatter=None, **kwargs): """ Render a string representation of the Index """ header = [] if name: header.append(pprint_thing(self.name, escape_chars=('\t', '\r', '\n')) if self.name is not None else '') if formatter is not None: return header + list(self.map(formatter)) return self._format_with_header(header, **kwargs) def _format_with_header(self, header, na_rep='NaN', **kwargs): values = self.values from pandas.io.formats.format import format_array if is_categorical_dtype(values.dtype): values = np.array(values) elif is_object_dtype(values.dtype): values = lib.maybe_convert_objects(values, safe=1) if is_object_dtype(values.dtype): result = [pprint_thing(x, escape_chars=('\t', '\r', '\n')) for x in values] # could have nans mask = isna(values) if mask.any(): result = np.array(result) result[mask] = na_rep result = result.tolist() else: result = _trim_front(format_array(values, None, justify='left')) return header + result def to_native_types(self, slicer=None, **kwargs): """ Format specified values of `self` and return them. Parameters ---------- slicer : int, array-like An indexer into `self` that specifies which values are used in the formatting process. kwargs : dict Options for specifying how the values should be formatted. These options include the following: 1) na_rep : str The value that serves as a placeholder for NULL values 2) quoting : bool or None Whether or not there are quoted values in `self` 3) date_format : str The format used to represent date-like values """ values = self if slicer is not None: values = values[slicer] return values._format_native_types(**kwargs) def _format_native_types(self, na_rep='', quoting=None, **kwargs): """ actually format my specific types """ mask = isna(self) if not self.is_object() and not quoting: values = np.asarray(self).astype(str) else: values = np.array(self, dtype=object, copy=True) values[mask] = na_rep return values def equals(self, other): """ Determines if two Index objects contain the same elements. """ if self.is_(other): return True if not isinstance(other, Index): return False if is_object_dtype(self) and not is_object_dtype(other): # if other is not object, use other's logic for coercion return other.equals(self) try: return array_equivalent(_values_from_object(self), _values_from_object(other)) except: return False def identical(self, other): """Similar to equals, but check that other comparable attributes are also equal """ return (self.equals(other) and all((getattr(self, c, None) == getattr(other, c, None) for c in self._comparables)) and type(self) == type(other)) def asof(self, label): """ For a sorted index, return the most recent label up to and including the passed label. Return NaN if not found. 
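Examples
--------
(an illustrative sketch added here; not part of the original docstring)

>>> import pandas as pd
>>> idx = pd.Index([10, 20, 30])
>>> idx.asof(25)
20
>>> idx.asof(5)
nan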
See also -------- get_loc : asof is a thin wrapper around get_loc with method='pad' """ try: loc = self.get_loc(label, method='pad') except KeyError: return _get_na_value(self.dtype) else: if isinstance(loc, slice): loc = loc.indices(len(self))[-1] return self[loc] def asof_locs(self, where, mask): """ where : array of timestamps mask : array of booleans where data is not NA """ locs = self.values[mask].searchsorted(where.values, side='right') locs = np.where(locs > 0, locs - 1, 0) result = np.arange(len(self))[mask].take(locs) first = mask.argmax() result[(locs == 0) & (where < self.values[first])] = -1 return result def sort_values(self, return_indexer=False, ascending=True): """ Return sorted copy of Index """ _as = self.argsort() if not ascending: _as = _as[::-1] sorted_index = self.take(_as) if return_indexer: return sorted_index, _as else: return sorted_index def sort(self, *args, **kwargs): raise TypeError("cannot sort an Index object in-place, use " "sort_values instead") def sortlevel(self, level=None, ascending=True, sort_remaining=None): """ For internal compatibility with with the Index API Sort the Index. This is for compat with MultiIndex Parameters ---------- ascending : boolean, default True False to sort in descending order level, sort_remaining are compat parameters Returns ------- sorted_index : Index """ return self.sort_values(return_indexer=True, ascending=ascending) def shift(self, periods=1, freq=None): """ Shift Index containing datetime objects by input number of periods and DateOffset Returns ------- shifted : Index """ raise NotImplementedError("Not supported for type %s" % type(self).__name__) def argsort(self, *args, **kwargs): """ Returns the indices that would sort the index and its underlying data. Returns ------- argsorted : numpy array See also -------- numpy.ndarray.argsort """ result = self.asi8 if result is None: result = np.array(self) return result.argsort(*args, **kwargs) def __add__(self, other): return Index(np.array(self) + other) def __radd__(self, other): return Index(other + np.array(self)) __iadd__ = __add__ def __sub__(self, other): raise TypeError("cannot perform __sub__ with this index type: " "{typ}".format(typ=type(self))) def __and__(self, other): return self.intersection(other) def __or__(self, other): return self.union(other) def __xor__(self, other): return self.symmetric_difference(other) def _get_consensus_name(self, other): """ Given 2 indexes, give a consensus name meaning we take the not None one, or None if the names differ. Return a new object if we are resetting the name """ if self.name != other.name: if self.name is None or other.name is None: name = self.name or other.name else: name = None if self.name != name: return self._shallow_copy(name=name) return self def union(self, other): """ Form the union of two Index objects and sorts if possible. 
Parameters ---------- other : Index or array-like Returns ------- union : Index Examples -------- >>> idx1 = pd.Index([1, 2, 3, 4]) >>> idx2 = pd.Index([3, 4, 5, 6]) >>> idx1.union(idx2) Int64Index([1, 2, 3, 4, 5, 6], dtype='int64') """ self._assert_can_do_setop(other) other = _ensure_index(other) if len(other) == 0 or self.equals(other): return self._get_consensus_name(other) if len(self) == 0: return other._get_consensus_name(self) if not is_dtype_equal(self.dtype, other.dtype): this = self.astype('O') other = other.astype('O') return this.union(other) if self.is_monotonic and other.is_monotonic: try: result = self._outer_indexer(self._values, other._values)[0] except TypeError: # incomparable objects result = list(self._values) # worth making this faster? a very unusual case value_set = set(self._values) result.extend([x for x in other._values if x not in value_set]) else: indexer = self.get_indexer(other) indexer, = (indexer == -1).nonzero() if len(indexer) > 0: other_diff = algos.take_nd(other._values, indexer, allow_fill=False) result = _concat._concat_compat((self._values, other_diff)) try: self._values[0] < other_diff[0] except TypeError as e: warnings.warn("%s, sort order is undefined for " "incomparable objects" % e, RuntimeWarning, stacklevel=3) else: types = frozenset((self.inferred_type, other.inferred_type)) if not types & _unsortable_types: result.sort() else: result = self._values try: result = np.sort(result) except TypeError as e: warnings.warn("%s, sort order is undefined for " "incomparable objects" % e, RuntimeWarning, stacklevel=3) # for subclasses return self._wrap_union_result(other, result) def _wrap_union_result(self, other, result): name = self.name if self.name == other.name else None return self.__class__(result, name=name) def intersection(self, other): """ Form the intersection of two Index objects. This returns a new Index with elements common to the index and `other`, preserving the order of the calling index. Parameters ---------- other : Index or array-like Returns ------- intersection : Index Examples -------- >>> idx1 = pd.Index([1, 2, 3, 4]) >>> idx2 = pd.Index([3, 4, 5, 6]) >>> idx1.intersection(idx2) Int64Index([3, 4], dtype='int64') """ self._assert_can_do_setop(other) other = _ensure_index(other) if self.equals(other): return self._get_consensus_name(other) if not is_dtype_equal(self.dtype, other.dtype): this = self.astype('O') other = other.astype('O') return this.intersection(other) if self.is_monotonic and other.is_monotonic: try: result = self._inner_indexer(self._values, other._values)[0] return self._wrap_union_result(other, result) except TypeError: pass try: indexer = Index(other._values).get_indexer(self._values) indexer = indexer.take((indexer != -1).nonzero()[0]) except: # duplicates indexer = algos.unique1d( Index(other._values).get_indexer_non_unique(self._values)[0]) indexer = indexer[indexer != -1] taken = other.take(indexer) if self.name != other.name: taken.name = None return taken def difference(self, other): """ Return a new Index with elements from the index that are not in `other`. This is the set difference of two Index objects. It's sorted if sorting is possible. 
Parameters ---------- other : Index or array-like Returns ------- difference : Index Examples -------- >>> idx1 = pd.Index([1, 2, 3, 4]) >>> idx2 = pd.Index([3, 4, 5, 6]) >>> idx1.difference(idx2) Int64Index([1, 2], dtype='int64') """ self._assert_can_do_setop(other) if self.equals(other): return Index([], name=self.name) other, result_name = self._convert_can_do_setop(other) this = self._get_unique_index() indexer = this.get_indexer(other) indexer = indexer.take((indexer != -1).nonzero()[0]) label_diff = np.setdiff1d(np.arange(this.size), indexer, assume_unique=True) the_diff = this.values.take(label_diff) try: the_diff = sorting.safe_sort(the_diff) except TypeError: pass return this._shallow_copy(the_diff, name=result_name, freq=None) def symmetric_difference(self, other, result_name=None): """ Compute the symmetric difference of two Index objects. It's sorted if sorting is possible. Parameters ---------- other : Index or array-like result_name : str Returns ------- symmetric_difference : Index Notes ----- ``symmetric_difference`` contains elements that appear in either ``idx1`` or ``idx2`` but not both. Equivalent to the Index created by ``idx1.difference(idx2) | idx2.difference(idx1)`` with duplicates dropped. Examples -------- >>> idx1 = Index([1, 2, 3, 4]) >>> idx2 = Index([2, 3, 4, 5]) >>> idx1.symmetric_difference(idx2) Int64Index([1, 5], dtype='int64') You can also use the ``^`` operator: >>> idx1 ^ idx2 Int64Index([1, 5], dtype='int64') """ self._assert_can_do_setop(other) other, result_name_update = self._convert_can_do_setop(other) if result_name is None: result_name = result_name_update this = self._get_unique_index() other = other._get_unique_index() indexer = this.get_indexer(other) # {this} minus {other} common_indexer = indexer.take((indexer != -1).nonzero()[0]) left_indexer = np.setdiff1d(np.arange(this.size), common_indexer, assume_unique=True) left_diff = this.values.take(left_indexer) # {other} minus {this} right_indexer = (indexer == -1).nonzero()[0] right_diff = other.values.take(right_indexer) the_diff = _concat._concat_compat([left_diff, right_diff]) try: the_diff = sorting.safe_sort(the_diff) except TypeError: pass attribs = self._get_attributes_dict() attribs['name'] = result_name if 'freq' in attribs: attribs['freq'] = None return self._shallow_copy_with_infer(the_diff, **attribs) def _get_unique_index(self, dropna=False): """ Returns an index containing unique values. Parameters ---------- dropna : bool If True, NaN values are dropped. Returns ------- uniques : index """ if self.is_unique and not dropna: return self values = self.values if not self.is_unique: values = self.unique() if dropna: try: if self.hasnans: values = values[~isna(values)] except NotImplementedError: pass return self._shallow_copy(values) _index_shared_docs['get_loc'] = """ Get integer location for requested label. Parameters ---------- key : label method : {None, 'pad'/'ffill', 'backfill'/'bfill', 'nearest'}, optional * default: exact matches only. * pad / ffill: find the PREVIOUS index value if no exact match. * backfill / bfill: use NEXT index value if no exact match * nearest: use the NEAREST index value if no exact match. Tied distances are broken by preferring the larger index value. tolerance : optional Maximum distance from index value for inexact matches. The value of the index at the matching location most satisfy the equation ``abs(index[loc] - key) <= tolerance``. .. 
versionadded:: 0.17.0 Returns ------- loc : int if unique index, possibly slice or mask if not """ @Appender(_index_shared_docs['get_loc']) def get_loc(self, key, method=None, tolerance=None): if method is None: if tolerance is not None: raise ValueError('tolerance argument only valid if using pad, ' 'backfill or nearest lookups') try: return self._engine.get_loc(key) except KeyError: return self._engine.get_loc(self._maybe_cast_indexer(key)) indexer = self.get_indexer([key], method=method, tolerance=tolerance) if indexer.ndim > 1 or indexer.size > 1: raise TypeError('get_loc requires scalar valued input') loc = indexer.item() if loc == -1: raise KeyError(key) return loc def get_value(self, series, key): """ Fast lookup of value from 1-dimensional ndarray. Only use this if you know what you're doing """ # if we have something that is Index-like, then # use this, e.g. DatetimeIndex s = getattr(series, '_values', None) if isinstance(s, Index) and is_scalar(key): try: return s[key] except (IndexError, ValueError): # invalid type as an indexer pass s = _values_from_object(series) k = _values_from_object(key) k = self._convert_scalar_indexer(k, kind='getitem') try: return self._engine.get_value(s, k, tz=getattr(series.dtype, 'tz', None)) except KeyError as e1: if len(self) > 0 and self.inferred_type in ['integer', 'boolean']: raise try: return libts.get_value_box(s, key) except IndexError: raise except TypeError: # generator/iterator-like if is_iterator(key): raise InvalidIndexError(key) else: raise e1 except Exception: # pragma: no cover raise e1 except TypeError: # python 3 if is_scalar(key): # pragma: no cover raise IndexError(key) raise InvalidIndexError(key) def set_value(self, arr, key, value): """ Fast lookup of value from 1-dimensional ndarray. Only use this if you know what you're doing """ self._engine.set_value(_values_from_object(arr), _values_from_object(key), value) def _get_level_values(self, level): """ Return an Index of values for requested level, equal to the length of the index Parameters ---------- level : int Returns ------- values : Index """ self._validate_index_level(level) return self get_level_values = _get_level_values _index_shared_docs['get_indexer'] = """ Compute indexer and mask for new index given the current index. The indexer should be then used as an input to ndarray.take to align the current data to the new index. Parameters ---------- target : %(target_klass)s method : {None, 'pad'/'ffill', 'backfill'/'bfill', 'nearest'}, optional * default: exact matches only. * pad / ffill: find the PREVIOUS index value if no exact match. * backfill / bfill: use NEXT index value if no exact match * nearest: use the NEAREST index value if no exact match. Tied distances are broken by preferring the larger index value. limit : int, optional Maximum number of consecutive labels in ``target`` to match for inexact matches. tolerance : optional Maximum distance between original and new labels for inexact matches. The values of the index at the matching locations most satisfy the equation ``abs(index[indexer] - target) <= tolerance``. .. versionadded:: 0.17.0 Examples -------- >>> indexer = index.get_indexer(new_index) >>> new_values = cur_values.take(indexer) Returns ------- indexer : ndarray of int Integers from 0 to n - 1 indicating that the index at these positions matches the corresponding target values. Missing values in the target are marked by -1. 
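A concrete illustration (added sketch; not part of the original docstring):

>>> import pandas as pd
>>> pd.Index([10, 20, 30]).get_indexer([20, 25, 40])
array([ 1, -1, -1])
>>> pd.Index([10, 20, 30]).get_indexer([20, 25, 40], method='pad')
array([1, 1, 2])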
""" @Appender(_index_shared_docs['get_indexer'] % _index_doc_kwargs) def get_indexer(self, target, method=None, limit=None, tolerance=None): method = missing.clean_reindex_fill_method(method) target = _ensure_index(target) if tolerance is not None: tolerance = self._convert_tolerance(tolerance) pself, ptarget = self._maybe_promote(target) if pself is not self or ptarget is not target: return pself.get_indexer(ptarget, method=method, limit=limit, tolerance=tolerance) if not is_dtype_equal(self.dtype, target.dtype): this = self.astype(object) target = target.astype(object) return this.get_indexer(target, method=method, limit=limit, tolerance=tolerance) if not self.is_unique: raise InvalidIndexError('Reindexing only valid with uniquely' ' valued Index objects') if method == 'pad' or method == 'backfill': indexer = self._get_fill_indexer(target, method, limit, tolerance) elif method == 'nearest': indexer = self._get_nearest_indexer(target, limit, tolerance) else: if tolerance is not None: raise ValueError('tolerance argument only valid if doing pad, ' 'backfill or nearest reindexing') if limit is not None: raise ValueError('limit argument only valid if doing pad, ' 'backfill or nearest reindexing') indexer = self._engine.get_indexer(target._values) return _ensure_platform_int(indexer) def _convert_tolerance(self, tolerance): # override this method on subclasses return tolerance def _get_fill_indexer(self, target, method, limit=None, tolerance=None): if self.is_monotonic_increasing and target.is_monotonic_increasing: method = (self._engine.get_pad_indexer if method == 'pad' else self._engine.get_backfill_indexer) indexer = method(target._values, limit) else: indexer = self._get_fill_indexer_searchsorted(target, method, limit) if tolerance is not None: indexer = self._filter_indexer_tolerance(target._values, indexer, tolerance) return indexer def _get_fill_indexer_searchsorted(self, target, method, limit=None): """ Fallback pad/backfill get_indexer that works for monotonic decreasing indexes and non-monotonic targets """ if limit is not None: raise ValueError('limit argument for %r method only well-defined ' 'if index and target are monotonic' % method) side = 'left' if method == 'pad' else 'right' # find exact matches first (this simplifies the algorithm) indexer = self.get_indexer(target) nonexact = (indexer == -1) indexer[nonexact] = self._searchsorted_monotonic(target[nonexact], side) if side == 'left': # searchsorted returns "indices into a sorted array such that, # if the corresponding elements in v were inserted before the # indices, the order of a would be preserved". # Thus, we need to subtract 1 to find values to the left. indexer[nonexact] -= 1 # This also mapped not found values (values of 0 from # np.searchsorted) to -1, which conveniently is also our # sentinel for missing values else: # Mark indices to the right of the largest value as not found indexer[indexer == len(self)] = -1 return indexer def _get_nearest_indexer(self, target, limit, tolerance): """ Get the indexer for the nearest index labels; requires an index with values that can be subtracted from each other (e.g., not strings or tuples). 
""" left_indexer = self.get_indexer(target, 'pad', limit=limit) right_indexer = self.get_indexer(target, 'backfill', limit=limit) target = np.asarray(target) left_distances = abs(self.values[left_indexer] - target) right_distances = abs(self.values[right_indexer] - target) op = operator.lt if self.is_monotonic_increasing else operator.le indexer = np.where(op(left_distances, right_distances) | (right_indexer == -1), left_indexer, right_indexer) if tolerance is not None: indexer = self._filter_indexer_tolerance(target, indexer, tolerance) return indexer def _filter_indexer_tolerance(self, target, indexer, tolerance): distance = abs(self.values[indexer] - target) indexer = np.where(distance <= tolerance, indexer, -1) return indexer _index_shared_docs['get_indexer_non_unique'] = """ Compute indexer and mask for new index given the current index. The indexer should be then used as an input to ndarray.take to align the current data to the new index. Parameters ---------- target : %(target_klass)s Returns ------- indexer : ndarray of int Integers from 0 to n - 1 indicating that the index at these positions matches the corresponding target values. Missing values in the target are marked by -1. missing : ndarray of int An indexer into the target of the values not found. These correspond to the -1 in the indexer array """ @Appender(_index_shared_docs['get_indexer_non_unique'] % _index_doc_kwargs) def get_indexer_non_unique(self, target): target = _ensure_index(target) pself, ptarget = self._maybe_promote(target) if pself is not self or ptarget is not target: return pself.get_indexer_non_unique(ptarget) if self.is_all_dates: self = Index(self.asi8) tgt_values = target.asi8 else: tgt_values = target._values indexer, missing = self._engine.get_indexer_non_unique(tgt_values) return _ensure_platform_int(indexer), missing def get_indexer_for(self, target, **kwargs): """ guaranteed return of an indexer even when non-unique This dispatches to get_indexer or get_indexer_nonunique as appropriate """ if self.is_unique: return self.get_indexer(target, **kwargs) indexer, _ = self.get_indexer_non_unique(target, **kwargs) return indexer def _maybe_promote(self, other): # A hack, but it works from pandas.core.indexes.datetimes import DatetimeIndex if self.inferred_type == 'date' and isinstance(other, DatetimeIndex): return DatetimeIndex(self), other elif self.inferred_type == 'boolean': if not is_object_dtype(self.dtype): return self.astype('object'), other.astype('object') return self, other def groupby(self, values): """ Group the index labels by a given array of values. Parameters ---------- values : array Values used to determine the groups. Returns ------- groups : dict {group name -> group labels} """ # TODO: if we are a MultiIndex, we can do better # that converting to tuples from .multi import MultiIndex if isinstance(values, MultiIndex): values = values.values values = _ensure_categorical(values) result = values._reverse_indexer() # map to the label result = {k: self.take(v) for k, v in compat.iteritems(result)} return result def map(self, mapper): """Apply mapper function to an index. Parameters ---------- mapper : callable Function to be applied. Returns ------- applied : Union[Index, MultiIndex], inferred The output of the mapping function applied to the index. If the function returns a tuple with more than one element a MultiIndex will be returned. 
""" from .multi import MultiIndex mapped_values = self._arrmap(self.values, mapper) attributes = self._get_attributes_dict() if mapped_values.size and isinstance(mapped_values[0], tuple): return MultiIndex.from_tuples(mapped_values, names=attributes.get('name')) attributes['copy'] = False return Index(mapped_values, **attributes) def isin(self, values, level=None): """ Compute boolean array of whether each index value is found in the passed set of values. Parameters ---------- values : set or list-like Sought values. .. versionadded:: 0.18.1 Support for values as a set level : str or int, optional Name or position of the index level to use (if the index is a MultiIndex). Notes ----- If `level` is specified: - if it is the name of one *and only one* index level, use that level; - otherwise it should be a number indicating level position. Returns ------- is_contained : ndarray (boolean dtype) """ if level is not None: self._validate_index_level(level) return algos.isin(np.array(self), values) def _can_reindex(self, indexer): """ *this is an internal non-public method* Check if we are allowing reindexing with this particular indexer Parameters ---------- indexer : an integer indexer Raises ------ ValueError if its a duplicate axis """ # trying to reindex on an axis with duplicates if not self.is_unique and len(indexer): raise ValueError("cannot reindex from a duplicate axis") def reindex(self, target, method=None, level=None, limit=None, tolerance=None): """ Create index with target's values (move/add/delete values as necessary) Parameters ---------- target : an iterable Returns ------- new_index : pd.Index Resulting index indexer : np.ndarray or None Indices of output values in original index """ # GH6552: preserve names when reindexing to non-named target # (i.e. neither Index nor Series). preserve_names = not hasattr(target, 'name') # GH7774: preserve dtype/tz if target is empty and not an Index. 
target = _ensure_has_len(target) # target may be an iterator if not isinstance(target, Index) and len(target) == 0: attrs = self._get_attributes_dict() attrs.pop('freq', None) # don't preserve freq target = self._simple_new(None, dtype=self.dtype, **attrs) else: target = _ensure_index(target) if level is not None: if method is not None: raise TypeError('Fill method not supported if level passed') _, indexer, _ = self._join_level(target, level, how='right', return_indexers=True) else: if self.equals(target): indexer = None else: if self.is_unique: indexer = self.get_indexer(target, method=method, limit=limit, tolerance=tolerance) else: if method is not None or limit is not None: raise ValueError("cannot reindex a non-unique index " "with a method or limit") indexer, missing = self.get_indexer_non_unique(target) if preserve_names and target.nlevels == 1 and target.name != self.name: target = target.copy() target.name = self.name return target, indexer def _reindex_non_unique(self, target): """ *this is an internal non-public method* Create a new index with target's values (move/add/delete values as necessary) use with non-unique Index and a possibly non-unique target Parameters ---------- target : an iterable Returns ------- new_index : pd.Index Resulting index indexer : np.ndarray or None Indices of output values in original index """ target = _ensure_index(target) indexer, missing = self.get_indexer_non_unique(target) check = indexer != -1 new_labels = self.take(indexer[check]) new_indexer = None if len(missing): l = np.arange(len(indexer)) missing = _ensure_platform_int(missing) missing_labels = target.take(missing) missing_indexer = _ensure_int64(l[~check]) cur_labels = self.take(indexer[check]).values cur_indexer = _ensure_int64(l[check]) new_labels = np.empty(tuple([len(indexer)]), dtype=object) new_labels[cur_indexer] = cur_labels new_labels[missing_indexer] = missing_labels # a unique indexer if target.is_unique: # see GH5553, make sure we use the right indexer new_indexer = np.arange(len(indexer)) new_indexer[cur_indexer] = np.arange(len(cur_labels)) new_indexer[missing_indexer] = -1 # we have a non_unique selector, need to use the original # indexer here else: # need to retake to have the same size as the indexer indexer[~check] = 0 # reset the new indexer to account for the new size new_indexer = np.arange(len(self.take(indexer))) new_indexer[~check] = -1 new_index = self._shallow_copy_with_infer(new_labels, freq=None) return new_index, indexer, new_indexer _index_shared_docs['join'] = """ *this is an internal non-public method* Compute join_index and indexers to conform data structures to the new index. Parameters ---------- other : Index how : {'left', 'right', 'inner', 'outer'} level : int or level name, default None return_indexers : boolean, default False sort : boolean, default False Sort the join keys lexicographically in the result Index. If False, the order of the join keys depends on the join type (how keyword) .. 
versionadded:: 0.20.0 Returns ------- join_index, (left_indexer, right_indexer) """ @Appender(_index_shared_docs['join']) def join(self, other, how='left', level=None, return_indexers=False, sort=False): from .multi import MultiIndex self_is_mi = isinstance(self, MultiIndex) other_is_mi = isinstance(other, MultiIndex) # try to figure out the join level # GH3662 if level is None and (self_is_mi or other_is_mi): # have the same levels/names so a simple join if self.names == other.names: pass else: return self._join_multi(other, how=how, return_indexers=return_indexers) # join on the level if level is not None and (self_is_mi or other_is_mi): return self._join_level(other, level, how=how, return_indexers=return_indexers) other = _ensure_index(other) if len(other) == 0 and how in ('left', 'outer'): join_index = self._shallow_copy() if return_indexers: rindexer = np.repeat(-1, len(join_index)) return join_index, None, rindexer else: return join_index if len(self) == 0 and how in ('right', 'outer'): join_index = other._shallow_copy() if return_indexers: lindexer = np.repeat(-1, len(join_index)) return join_index, lindexer, None else: return join_index if self._join_precedence < other._join_precedence: how = {'right': 'left', 'left': 'right'}.get(how, how) result = other.join(self, how=how, level=level, return_indexers=return_indexers) if return_indexers: x, y, z = result result = x, z, y return result if not is_dtype_equal(self.dtype, other.dtype): this = self.astype('O') other = other.astype('O') return this.join(other, how=how, return_indexers=return_indexers) _validate_join_method(how) if not self.is_unique and not other.is_unique: return self._join_non_unique(other, how=how, return_indexers=return_indexers) elif not self.is_unique or not other.is_unique: if self.is_monotonic and other.is_monotonic: return self._join_monotonic(other, how=how, return_indexers=return_indexers) else: return self._join_non_unique(other, how=how, return_indexers=return_indexers) elif self.is_monotonic and other.is_monotonic: try: return self._join_monotonic(other, how=how, return_indexers=return_indexers) except TypeError: pass if how == 'left': join_index = self elif how == 'right': join_index = other elif how == 'inner': join_index = self.intersection(other) elif how == 'outer': join_index = self.union(other) if sort: join_index = join_index.sort_values() if return_indexers: if join_index is self: lindexer = None else: lindexer = self.get_indexer(join_index) if join_index is other: rindexer = None else: rindexer = other.get_indexer(join_index) return join_index, lindexer, rindexer else: return join_index def _join_multi(self, other, how, return_indexers=True): from .multi import MultiIndex self_is_mi = isinstance(self, MultiIndex) other_is_mi = isinstance(other, MultiIndex) # figure out join names self_names = [n for n in self.names if n is not None] other_names = [n for n in other.names if n is not None] overlap = list(set(self_names) & set(other_names)) # need at least 1 in common, but not more than 1 if not len(overlap): raise ValueError("cannot join with no level specified and no " "overlapping names") if len(overlap) > 1: raise NotImplementedError("merging with more than one level " "overlap on a multi-index is not " "implemented") jl = overlap[0] # make the indices into mi's that match if not (self_is_mi and other_is_mi): flip_order = False if self_is_mi: self, other = other, self flip_order = True # flip if join method is right or left how = {'right': 'left', 'left': 'right'}.get(how, how) level = 
other.names.index(jl) result = self._join_level(other, level, how=how, return_indexers=return_indexers) if flip_order: if isinstance(result, tuple): return result[0], result[2], result[1] return result # 2 multi-indexes raise NotImplementedError("merging with both multi-indexes is not " "implemented") def _join_non_unique(self, other, how='left', return_indexers=False): from pandas.core.reshape.merge import _get_join_indexers left_idx, right_idx = _get_join_indexers([self._values], [other._values], how=how, sort=True) left_idx = _ensure_platform_int(left_idx) right_idx = _ensure_platform_int(right_idx) join_index = np.asarray(self._values.take(left_idx)) mask = left_idx == -1 np.putmask(join_index, mask, other._values.take(right_idx)) join_index = self._wrap_joined_index(join_index, other) if return_indexers: return join_index, left_idx, right_idx else: return join_index def _join_level(self, other, level, how='left', return_indexers=False, keep_order=True): """ The join method *only* affects the level of the resulting MultiIndex. Otherwise it just exactly aligns the Index data to the labels of the level in the MultiIndex. If `keep_order` == True, the order of the data indexed by the MultiIndex will not be changed; otherwise, it will tie out with `other`. """ from .multi import MultiIndex def _get_leaf_sorter(labels): """ returns sorter for the inner most level while preserving the order of higher levels """ if labels[0].size == 0: return np.empty(0, dtype='int64') if len(labels) == 1: lab = _ensure_int64(labels[0]) sorter, _ = libalgos.groupsort_indexer(lab, 1 + lab.max()) return sorter # find indexers of begining of each set of # same-key labels w.r.t all but last level tic = labels[0][:-1] != labels[0][1:] for lab in labels[1:-1]: tic |= lab[:-1] != lab[1:] starts = np.hstack(([True], tic, [True])).nonzero()[0] lab = _ensure_int64(labels[-1]) return lib.get_level_sorter(lab, _ensure_int64(starts)) if isinstance(self, MultiIndex) and isinstance(other, MultiIndex): raise TypeError('Join on level between two MultiIndex objects ' 'is ambiguous') left, right = self, other flip_order = not isinstance(self, MultiIndex) if flip_order: left, right = right, left how = {'right': 'left', 'left': 'right'}.get(how, how) level = left._get_level_number(level) old_level = left.levels[level] if not right.is_unique: raise NotImplementedError('Index._join_level on non-unique index ' 'is not implemented') new_level, left_lev_indexer, right_lev_indexer = \ old_level.join(right, how=how, return_indexers=True) if left_lev_indexer is None: if keep_order or len(left) == 0: left_indexer = None join_index = left else: # sort the leaves left_indexer = _get_leaf_sorter(left.labels[:level + 1]) join_index = left[left_indexer] else: left_lev_indexer = _ensure_int64(left_lev_indexer) rev_indexer = lib.get_reverse_indexer(left_lev_indexer, len(old_level)) new_lev_labels = algos.take_nd(rev_indexer, left.labels[level], allow_fill=False) new_labels = list(left.labels) new_labels[level] = new_lev_labels new_levels = list(left.levels) new_levels[level] = new_level if keep_order: # just drop missing values. o.w. 
keep order left_indexer = np.arange(len(left), dtype=np.intp) mask = new_lev_labels != -1 if not mask.all(): new_labels = [lab[mask] for lab in new_labels] left_indexer = left_indexer[mask] else: # tie out the order with other if level == 0: # outer most level, take the fast route ngroups = 1 + new_lev_labels.max() left_indexer, counts = libalgos.groupsort_indexer( new_lev_labels, ngroups) # missing values are placed first; drop them! left_indexer = left_indexer[counts[0]:] new_labels = [lab[left_indexer] for lab in new_labels] else: # sort the leaves mask = new_lev_labels != -1 mask_all = mask.all() if not mask_all: new_labels = [lab[mask] for lab in new_labels] left_indexer = _get_leaf_sorter(new_labels[:level + 1]) new_labels = [lab[left_indexer] for lab in new_labels] # left_indexers are w.r.t masked frame. # reverse to original frame! if not mask_all: left_indexer = mask.nonzero()[0][left_indexer] join_index = MultiIndex(levels=new_levels, labels=new_labels, names=left.names, verify_integrity=False) if right_lev_indexer is not None: right_indexer = algos.take_nd(right_lev_indexer, join_index.labels[level], allow_fill=False) else: right_indexer = join_index.labels[level] if flip_order: left_indexer, right_indexer = right_indexer, left_indexer if return_indexers: left_indexer = (None if left_indexer is None else _ensure_platform_int(left_indexer)) right_indexer = (None if right_indexer is None else _ensure_platform_int(right_indexer)) return join_index, left_indexer, right_indexer else: return join_index def _join_monotonic(self, other, how='left', return_indexers=False): if self.equals(other): ret_index = other if how == 'right' else self if return_indexers: return ret_index, None, None else: return ret_index sv = self._values ov = other._values if self.is_unique and other.is_unique: # We can perform much better than the general case if how == 'left': join_index = self lidx = None ridx = self._left_indexer_unique(sv, ov) elif how == 'right': join_index = other lidx = self._left_indexer_unique(ov, sv) ridx = None elif how == 'inner': join_index, lidx, ridx = self._inner_indexer(sv, ov) join_index = self._wrap_joined_index(join_index, other) elif how == 'outer': join_index, lidx, ridx = self._outer_indexer(sv, ov) join_index = self._wrap_joined_index(join_index, other) else: if how == 'left': join_index, lidx, ridx = self._left_indexer(sv, ov) elif how == 'right': join_index, ridx, lidx = self._left_indexer(ov, sv) elif how == 'inner': join_index, lidx, ridx = self._inner_indexer(sv, ov) elif how == 'outer': join_index, lidx, ridx = self._outer_indexer(sv, ov) join_index = self._wrap_joined_index(join_index, other) if return_indexers: lidx = None if lidx is None else _ensure_platform_int(lidx) ridx = None if ridx is None else _ensure_platform_int(ridx) return join_index, lidx, ridx else: return join_index def _wrap_joined_index(self, joined, other): name = self.name if self.name == other.name else None return Index(joined, name=name) def _get_string_slice(self, key, use_lhs=True, use_rhs=True): # this is for partial string indexing, # overridden in DatetimeIndex, TimedeltaIndex and PeriodIndex raise NotImplementedError def slice_indexer(self, start=None, end=None, step=None, kind=None): """ For an ordered Index, compute the slice indexer for input labels and step Parameters ---------- start : label, default None If None, defaults to the beginning end : label, default None If None, defaults to the end step : int, default None kind : string, default None Returns ------- indexer : ndarray 
or slice Notes ----- This function assumes that the data is sorted, so use at your own peril """ start_slice, end_slice = self.slice_locs(start, end, step=step, kind=kind) # return a slice if not is_scalar(start_slice): raise AssertionError("Start slice bound is non-scalar") if not is_scalar(end_slice): raise AssertionError("End slice bound is non-scalar") return slice(start_slice, end_slice, step) def _maybe_cast_indexer(self, key): """ If we have a float key and are not a floating index then try to cast to an int if equivalent """ if is_float(key) and not self.is_floating(): try: ckey = int(key) if ckey == key: key = ckey except (OverflowError, ValueError, TypeError): pass return key def _validate_indexer(self, form, key, kind): """ if we are positional indexer validate that we have appropriate typed bounds must be an integer """ assert kind in ['ix', 'loc', 'getitem', 'iloc'] if key is None: pass elif is_integer(key): pass elif kind in ['iloc', 'getitem']: self._invalid_indexer(form, key) return key _index_shared_docs['_maybe_cast_slice_bound'] = """ This function should be overloaded in subclasses that allow non-trivial casting on label-slice bounds, e.g. datetime-like indices allowing strings containing formatted datetimes. Parameters ---------- label : object side : {'left', 'right'} kind : {'ix', 'loc', 'getitem'} Returns ------- label : object Notes ----- Value of `side` parameter should be validated in caller. """ @Appender(_index_shared_docs['_maybe_cast_slice_bound']) def _maybe_cast_slice_bound(self, label, side, kind): assert kind in ['ix', 'loc', 'getitem', None] # We are a plain index here (sub-class override this method if they # wish to have special treatment for floats/ints, e.g. Float64Index and # datetimelike Indexes # reject them if is_float(label): if not (kind in ['ix'] and (self.holds_integer() or self.is_floating())): self._invalid_indexer('slice', label) # we are trying to find integer bounds on a non-integer based index # this is rejected (generally .loc gets you here) elif is_integer(label): self._invalid_indexer('slice', label) return label def _searchsorted_monotonic(self, label, side='left'): if self.is_monotonic_increasing: return self.searchsorted(label, side=side) elif self.is_monotonic_decreasing: # np.searchsorted expects ascending sort order, have to reverse # everything for it to work (element ordering, search side and # resulting value). pos = self[::-1].searchsorted(label, side='right' if side == 'left' else 'right') return len(self) - pos raise ValueError('index must be monotonic increasing or decreasing') def _get_loc_only_exact_matches(self, key): """ This is overriden on subclasses (namely, IntervalIndex) to control get_slice_bound. """ return self.get_loc(key) def get_slice_bound(self, label, side, kind): """ Calculate slice bound that corresponds to given label. Returns leftmost (one-past-the-rightmost if ``side=='right'``) position of given label. Parameters ---------- label : object side : {'left', 'right'} kind : {'ix', 'loc', 'getitem'} """ assert kind in ['ix', 'loc', 'getitem', None] if side not in ('left', 'right'): raise ValueError("Invalid value for side kwarg," " must be either 'left' or 'right': %s" % (side, )) original_label = label # For datetime indices label may be a string that has to be converted # to datetime boundary according to its resolution. 
label = self._maybe_cast_slice_bound(label, side, kind) # we need to look up the label try: slc = self._get_loc_only_exact_matches(label) except KeyError as err: try: return self._searchsorted_monotonic(label, side) except ValueError: # raise the original KeyError raise err if isinstance(slc, np.ndarray): # get_loc may return a boolean array or an array of indices, which # is OK as long as they are representable by a slice. if is_bool_dtype(slc): slc = lib.maybe_booleans_to_slice(slc.view('u1')) else: slc = lib.maybe_indices_to_slice(slc.astype('i8'), len(self)) if isinstance(slc, np.ndarray): raise KeyError("Cannot get %s slice bound for non-unique " "label: %r" % (side, original_label)) if isinstance(slc, slice): if side == 'left': return slc.start else: return slc.stop else: if side == 'right': return slc + 1 else: return slc def slice_locs(self, start=None, end=None, step=None, kind=None): """ Compute slice locations for input labels. Parameters ---------- start : label, default None If None, defaults to the beginning end : label, default None If None, defaults to the end step : int, defaults None If None, defaults to 1 kind : {'ix', 'loc', 'getitem'} or None Returns ------- start, end : int """ inc = (step is None or step >= 0) if not inc: # If it's a reverse slice, temporarily swap bounds. start, end = end, start start_slice = None if start is not None: start_slice = self.get_slice_bound(start, 'left', kind) if start_slice is None: start_slice = 0 end_slice = None if end is not None: end_slice = self.get_slice_bound(end, 'right', kind) if end_slice is None: end_slice = len(self) if not inc: # Bounds at this moment are swapped, swap them back and shift by 1. # # slice_locs('B', 'A', step=-1): s='B', e='A' # # s='A' e='B' # AFTER SWAP: | | # v ------------------> V # ----------------------------------- # | | |A|A|A|A| | | | | |B|B| | | | | # ----------------------------------- # ^ <------------------ ^ # SHOULD BE: | | # end=s-1 start=e-1 # end_slice, start_slice = start_slice - 1, end_slice - 1 # i == -1 triggers ``len(self) + i`` selection that points to the # last element, not before-the-first one, subtracting len(self) # compensates that. if end_slice == -1: end_slice -= len(self) if start_slice == -1: start_slice -= len(self) return start_slice, end_slice def delete(self, loc): """ Make new Index with passed location(-s) deleted Returns ------- new_index : Index """ return self._shallow_copy(np.delete(self._data, loc)) def insert(self, loc, item): """ Make new Index inserting new item at location. Follows Python list.append semantics for negative values Parameters ---------- loc : int item : object Returns ------- new_index : Index """ _self = np.asarray(self) item = self._coerce_scalar_to_index(item)._values idx = np.concatenate((_self[:loc], item, _self[loc:])) return self._shallow_copy_with_infer(idx) def drop(self, labels, errors='raise'): """ Make new Index with passed list of labels deleted Parameters ---------- labels : array-like errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and existing labels are dropped. 
Returns ------- dropped : Index """ labels = com._index_labels_to_array(labels) indexer = self.get_indexer(labels) mask = indexer == -1 if mask.any(): if errors != 'ignore': raise ValueError('labels %s not contained in axis' % labels[mask]) indexer = indexer[~mask] return self.delete(indexer) @Appender(base._shared_docs['unique'] % _index_doc_kwargs) def unique(self): result = super(Index, self).unique() return self._shallow_copy(result) @Appender(base._shared_docs['drop_duplicates'] % _index_doc_kwargs) def drop_duplicates(self, keep='first'): return super(Index, self).drop_duplicates(keep=keep) @Appender(base._shared_docs['duplicated'] % _index_doc_kwargs) def duplicated(self, keep='first'): return super(Index, self).duplicated(keep=keep) _index_shared_docs['fillna'] = """ Fill NA/NaN values with the specified value Parameters ---------- value : scalar Scalar value to use to fill holes (e.g. 0). This value cannot be a list-likes. downcast : dict, default is None a dict of item->dtype of what to downcast if possible, or the string 'infer' which will try to downcast to an appropriate equal type (e.g. float64 to int64 if possible) Returns ------- filled : %(klass)s """ @Appender(_index_shared_docs['fillna']) def fillna(self, value=None, downcast=None): self._assert_can_do_op(value) if self.hasnans: result = self.putmask(self._isnan, value) if downcast is None: # no need to care metadata other than name # because it can't have freq if return Index(result, name=self.name) return self._shallow_copy() _index_shared_docs['dropna'] = """ Return Index without NA/NaN values Parameters ---------- how : {'any', 'all'}, default 'any' If the Index is a MultiIndex, drop the value when any or all levels are NaN. Returns ------- valid : Index """ @Appender(_index_shared_docs['dropna']) def dropna(self, how='any'): if how not in ('any', 'all'): raise ValueError("invalid how option: {0}".format(how)) if self.hasnans: return self._shallow_copy(self.values[~self._isnan]) return self._shallow_copy() def _evaluate_with_timedelta_like(self, other, op, opstr): raise TypeError("can only perform ops with timedelta like values") def _evaluate_with_datetime_like(self, other, op, opstr): raise TypeError("can only perform ops with datetime like values") def _evalute_compare(self, op): raise base.AbstractMethodError(self) @classmethod def _add_comparison_methods(cls): """ add in comparison methods """ def _make_compare(op): def _evaluate_compare(self, other): if isinstance(other, (np.ndarray, Index, ABCSeries)): if other.ndim > 0 and len(self) != len(other): raise ValueError('Lengths must match to compare') # we may need to directly compare underlying # representations if needs_i8_conversion(self) and needs_i8_conversion(other): return self._evaluate_compare(other, op) if (is_object_dtype(self) and self.nlevels == 1): # don't pass MultiIndex with np.errstate(all='ignore'): result = _comp_method_OBJECT_ARRAY( op, self.values, other) else: with np.errstate(all='ignore'): result = op(self.values, np.asarray(other)) # technically we could support bool dtyped Index # for now just return the indexing array directly if is_bool_dtype(result): return result try: return Index(result) except TypeError: return result return _evaluate_compare cls.__eq__ = _make_compare(operator.eq) cls.__ne__ = _make_compare(operator.ne) cls.__lt__ = _make_compare(operator.lt) cls.__gt__ = _make_compare(operator.gt) cls.__le__ = _make_compare(operator.le) cls.__ge__ = _make_compare(operator.ge) @classmethod def 
_add_numeric_methods_add_sub_disabled(cls): """ add in the numeric add/sub methods to disable """ def _make_invalid_op(name): def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self))) invalid_op.__name__ = name return invalid_op cls.__add__ = cls.__radd__ = __iadd__ = _make_invalid_op('__add__') # noqa cls.__sub__ = __isub__ = _make_invalid_op('__sub__') # noqa @classmethod def _add_numeric_methods_disabled(cls): """ add in numeric methods to disable other than add/sub """ def _make_invalid_op(name): def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self))) invalid_op.__name__ = name return invalid_op cls.__pow__ = cls.__rpow__ = _make_invalid_op('__pow__') cls.__mul__ = cls.__rmul__ = _make_invalid_op('__mul__') cls.__floordiv__ = cls.__rfloordiv__ = _make_invalid_op('__floordiv__') cls.__truediv__ = cls.__rtruediv__ = _make_invalid_op('__truediv__') if not compat.PY3: cls.__div__ = cls.__rdiv__ = _make_invalid_op('__div__') cls.__neg__ = _make_invalid_op('__neg__') cls.__pos__ = _make_invalid_op('__pos__') cls.__abs__ = _make_invalid_op('__abs__') cls.__inv__ = _make_invalid_op('__inv__') def _maybe_update_attributes(self, attrs): """ Update Index attributes (e.g. freq) depending on op """ return attrs def _validate_for_numeric_unaryop(self, op, opstr): """ validate if we can perform a numeric unary operation """ if not self._is_numeric_dtype: raise TypeError("cannot evaluate a numeric op " "{opstr} for type: {typ}".format( opstr=opstr, typ=type(self)) ) def _validate_for_numeric_binop(self, other, op, opstr): """ return valid other, evaluate or raise TypeError if we are not of the appropriate type internal method called by ops """ # if we are an inheritor of numeric, # but not actually numeric (e.g. 
DatetimeIndex/PeriodInde) if not self._is_numeric_dtype: raise TypeError("cannot evaluate a numeric op {opstr} " "for type: {typ}".format( opstr=opstr, typ=type(self)) ) if isinstance(other, Index): if not other._is_numeric_dtype: raise TypeError("cannot evaluate a numeric op " "{opstr} with type: {typ}".format( opstr=type(self), typ=type(other)) ) elif isinstance(other, np.ndarray) and not other.ndim: other = other.item() if isinstance(other, (Index, ABCSeries, np.ndarray)): if len(self) != len(other): raise ValueError("cannot evaluate a numeric op with " "unequal lengths") other = _values_from_object(other) if other.dtype.kind not in ['f', 'i', 'u']: raise TypeError("cannot evaluate a numeric op " "with a non-numeric dtype") elif isinstance(other, (ABCDateOffset, np.timedelta64, Timedelta, datetime.timedelta)): # higher up to handle pass elif isinstance(other, (Timestamp, np.datetime64)): # higher up to handle pass else: if not (is_float(other) or is_integer(other)): raise TypeError("can only perform ops with scalar values") return other @classmethod def _add_numeric_methods_binary(cls): """ add in numeric methods """ def _make_evaluate_binop(op, opstr, reversed=False, constructor=Index): def _evaluate_numeric_binop(self, other): other = self._validate_for_numeric_binop(other, op, opstr) # handle time-based others if isinstance(other, (ABCDateOffset, np.timedelta64, Timedelta, datetime.timedelta)): return self._evaluate_with_timedelta_like(other, op, opstr) elif isinstance(other, (Timestamp, np.datetime64)): return self._evaluate_with_datetime_like(other, op, opstr) # if we are a reversed non-communative op values = self.values if reversed: values, other = other, values attrs = self._get_attributes_dict() attrs = self._maybe_update_attributes(attrs) with np.errstate(all='ignore'): result = op(values, other) return constructor(result, **attrs) return _evaluate_numeric_binop cls.__add__ = cls.__radd__ = _make_evaluate_binop( operator.add, '__add__') cls.__sub__ = _make_evaluate_binop( operator.sub, '__sub__') cls.__rsub__ = _make_evaluate_binop( operator.sub, '__sub__', reversed=True) cls.__mul__ = cls.__rmul__ = _make_evaluate_binop( operator.mul, '__mul__') cls.__rpow__ = _make_evaluate_binop( operator.pow, '__pow__', reversed=True) cls.__pow__ = _make_evaluate_binop( operator.pow, '__pow__') cls.__mod__ = _make_evaluate_binop( operator.mod, '__mod__') cls.__floordiv__ = _make_evaluate_binop( operator.floordiv, '__floordiv__') cls.__rfloordiv__ = _make_evaluate_binop( operator.floordiv, '__floordiv__', reversed=True) cls.__truediv__ = _make_evaluate_binop( operator.truediv, '__truediv__') cls.__rtruediv__ = _make_evaluate_binop( operator.truediv, '__truediv__', reversed=True) if not compat.PY3: cls.__div__ = _make_evaluate_binop( operator.div, '__div__') cls.__rdiv__ = _make_evaluate_binop( operator.div, '__div__', reversed=True) cls.__divmod__ = _make_evaluate_binop( divmod, '__divmod__', constructor=lambda result, **attrs: ( Index(result[0], **attrs), Index(result[1], **attrs), ), ) @classmethod def _add_numeric_methods_unary(cls): """ add in numeric unary methods """ def _make_evaluate_unary(op, opstr): def _evaluate_numeric_unary(self): self._validate_for_numeric_unaryop(op, opstr) attrs = self._get_attributes_dict() attrs = self._maybe_update_attributes(attrs) return Index(op(self.values), **attrs) return _evaluate_numeric_unary cls.__neg__ = _make_evaluate_unary(lambda x: -x, '__neg__') cls.__pos__ = _make_evaluate_unary(lambda x: x, '__pos__') cls.__abs__ = 
_make_evaluate_unary(np.abs, '__abs__') cls.__inv__ = _make_evaluate_unary(lambda x: -x, '__inv__') @classmethod def _add_numeric_methods(cls): cls._add_numeric_methods_unary() cls._add_numeric_methods_binary() @classmethod def _add_logical_methods(cls): """ add in logical methods """ _doc = """ %(desc)s Parameters ---------- All arguments to numpy.%(outname)s are accepted. Returns ------- %(outname)s : bool or array_like (if axis is specified) A single element array_like may be converted to bool.""" def _make_logical_function(name, desc, f): @Substitution(outname=name, desc=desc) @Appender(_doc) def logical_func(self, *args, **kwargs): result = f(self.values) if (isinstance(result, (np.ndarray, ABCSeries, Index)) and result.ndim == 0): # return NumPy type return result.dtype.type(result.item()) else: # pragma: no cover return result logical_func.__name__ = name return logical_func cls.all = _make_logical_function('all', 'Return whether all elements ' 'are True', np.all) cls.any = _make_logical_function('any', 'Return whether any element is True', np.any) @classmethod def _add_logical_methods_disabled(cls): """ add in logical methods to disable """ def _make_invalid_op(name): def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self))) invalid_op.__name__ = name return invalid_op cls.all = _make_invalid_op('all') cls.any = _make_invalid_op('any') Index._add_numeric_methods_disabled() Index._add_logical_methods() Index._add_comparison_methods() def _ensure_index(index_like, copy=False): if isinstance(index_like, Index): if copy: index_like = index_like.copy() return index_like if hasattr(index_like, 'name'): return Index(index_like, name=index_like.name, copy=copy) # must check for exactly list here because of strict type # check in clean_index_list if isinstance(index_like, list): if type(index_like) != list: index_like = list(index_like) converted, all_arrays = lib.clean_index_list(index_like) if len(converted) > 0 and all_arrays: from .multi import MultiIndex return MultiIndex.from_arrays(converted) else: index_like = converted else: # clean_index_list does the equivalent of copying # so only need to do this if not list instance if copy: from copy import copy index_like = copy(index_like) return Index(index_like) def _get_na_value(dtype): if is_datetime64_any_dtype(dtype) or is_timedelta64_dtype(dtype): return libts.NaT return {np.datetime64: libts.NaT, np.timedelta64: libts.NaT}.get(dtype, np.nan) def _ensure_has_len(seq): """If seq is an iterator, put its values into a list.""" try: len(seq) except TypeError: return list(seq) else: return seq def _trim_front(strings): """ Trims zeros and decimal points """ trimmed = strings while len(strings) > 0 and all([x[0] == ' ' for x in trimmed]): trimmed = [x[1:] for x in trimmed] return trimmed def _validate_join_method(method): if method not in ['left', 'right', 'inner', 'outer']: raise ValueError('do not recognize join method %s' % method)
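# --- Illustrative usage sketch (not part of the source file above) ---
# The Index methods implemented above (slice_locs, insert, delete, drop, fillna,
# dropna, join) are public pandas API. This is a minimal, hedged sketch of how
# they behave, assuming a reasonably recent pandas/numpy install; exact reprs
# vary slightly between versions.
import numpy as np
import pandas as pd

idx = pd.Index(['a', 'b', 'c', 'd'])

# Label-based slice bounds: start is inclusive, end is one past the last match.
print(idx.slice_locs('b', 'c'))        # (1, 3)

# insert/delete/drop return new Index objects; the original is never mutated.
print(idx.insert(1, 'x'))              # ['a', 'x', 'b', 'c', 'd']
print(idx.delete(0))                   # ['b', 'c', 'd']
print(idx.drop(['b', 'd']))            # ['a', 'c']

# fillna/dropna handle missing labels.
nan_idx = pd.Index([1.0, np.nan, 3.0])
print(nan_idx.fillna(0.0))             # [1.0, 0.0, 3.0]
print(nan_idx.dropna())                # [1.0, 3.0]

# join with return_indexers exposes the alignment used internally by merge/align.
left, right = pd.Index([1, 2, 3]), pd.Index([2, 3, 4])
joined, lidx, ridx = left.join(right, how='inner', return_indexers=True)
print(joined, lidx, ridx)              # [2, 3], [1 2], [0 1]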
# -*- coding: utf-8 -*- from __future__ import print_function import pytest from datetime import datetime from numpy import random import numpy as np from pandas.compat import lrange, lzip, u from pandas import (compat, DataFrame, Series, Index, MultiIndex, date_range, isna) import pandas as pd from pandas.util.testing import assert_frame_equal from pandas.errors import PerformanceWarning import pandas.util.testing as tm from pandas.tests.frame.common import TestData class TestDataFrameSelectReindex(TestData): # These are specific reindex-based tests; other indexing tests should go in # test_indexing def test_drop_names(self): df = DataFrame([[1, 2, 3], [3, 4, 5], [5, 6, 7]], index=['a', 'b', 'c'], columns=['d', 'e', 'f']) df.index.name, df.columns.name = 'first', 'second' df_dropped_b = df.drop('b') df_dropped_e = df.drop('e', axis=1) df_inplace_b, df_inplace_e = df.copy(), df.copy() df_inplace_b.drop('b', inplace=True) df_inplace_e.drop('e', axis=1, inplace=True) for obj in (df_dropped_b, df_dropped_e, df_inplace_b, df_inplace_e): assert obj.index.name == 'first' assert obj.columns.name == 'second' assert list(df.columns) == ['d', 'e', 'f'] pytest.raises(ValueError, df.drop, ['g']) pytest.raises(ValueError, df.drop, ['g'], 1) # errors = 'ignore' dropped = df.drop(['g'], errors='ignore') expected = Index(['a', 'b', 'c'], name='first') tm.assert_index_equal(dropped.index, expected) dropped = df.drop(['b', 'g'], errors='ignore') expected = Index(['a', 'c'], name='first') tm.assert_index_equal(dropped.index, expected) dropped = df.drop(['g'], axis=1, errors='ignore') expected = Index(['d', 'e', 'f'], name='second') tm.assert_index_equal(dropped.columns, expected) dropped = df.drop(['d', 'g'], axis=1, errors='ignore') expected = Index(['e', 'f'], name='second') tm.assert_index_equal(dropped.columns, expected) # GH 16398 dropped = df.drop([], errors='ignore') expected = Index(['a', 'b', 'c'], name='first') tm.assert_index_equal(dropped.index, expected) def test_drop_col_still_multiindex(self): arrays = [['a', 'b', 'c', 'top'], ['', '', '', 'OD'], ['', '', '', 'wx']] tuples = sorted(zip(*arrays)) index = MultiIndex.from_tuples(tuples) df = DataFrame(np.random.randn(3, 4), columns=index) del df[('a', '', '')] assert(isinstance(df.columns, MultiIndex)) def test_drop(self): simple = DataFrame({"A": [1, 2, 3, 4], "B": [0, 1, 2, 3]}) assert_frame_equal(simple.drop("A", axis=1), simple[['B']]) assert_frame_equal(simple.drop(["A", "B"], axis='columns'), simple[[]]) assert_frame_equal(simple.drop([0, 1, 3], axis=0), simple.loc[[2], :]) assert_frame_equal(simple.drop( [0, 3], axis='index'), simple.loc[[1, 2], :]) pytest.raises(ValueError, simple.drop, 5) pytest.raises(ValueError, simple.drop, 'C', 1) pytest.raises(ValueError, simple.drop, [1, 5]) pytest.raises(ValueError, simple.drop, ['A', 'C'], 1) # errors = 'ignore' assert_frame_equal(simple.drop(5, errors='ignore'), simple) assert_frame_equal(simple.drop([0, 5], errors='ignore'), simple.loc[[1, 2, 3], :]) assert_frame_equal(simple.drop('C', axis=1, errors='ignore'), simple) assert_frame_equal(simple.drop(['A', 'C'], axis=1, errors='ignore'), simple[['B']]) # non-unique - wheee! 
nu_df = DataFrame(lzip(range(3), range(-3, 1), list('abc')), columns=['a', 'a', 'b']) assert_frame_equal(nu_df.drop('a', axis=1), nu_df[['b']]) assert_frame_equal(nu_df.drop('b', axis='columns'), nu_df['a']) assert_frame_equal(nu_df.drop([]), nu_df) # GH 16398 nu_df = nu_df.set_index(pd.Index(['X', 'Y', 'X'])) nu_df.columns = list('abc') assert_frame_equal(nu_df.drop('X', axis='rows'), nu_df.loc[["Y"], :]) assert_frame_equal(nu_df.drop(['X', 'Y'], axis=0), nu_df.loc[[], :]) # inplace cache issue # GH 5628 df = pd.DataFrame(np.random.randn(10, 3), columns=list('abc')) expected = df[~(df.b > 0)] df.drop(labels=df[df.b > 0].index, inplace=True) assert_frame_equal(df, expected) def test_drop_multiindex_not_lexsorted(self): # GH 11640 # define the lexsorted version lexsorted_mi = MultiIndex.from_tuples( [('a', ''), ('b1', 'c1'), ('b2', 'c2')], names=['b', 'c']) lexsorted_df = DataFrame([[1, 3, 4]], columns=lexsorted_mi) assert lexsorted_df.columns.is_lexsorted() # define the non-lexsorted version not_lexsorted_df = DataFrame(columns=['a', 'b', 'c', 'd'], data=[[1, 'b1', 'c1', 3], [1, 'b2', 'c2', 4]]) not_lexsorted_df = not_lexsorted_df.pivot_table( index='a', columns=['b', 'c'], values='d') not_lexsorted_df = not_lexsorted_df.reset_index() assert not not_lexsorted_df.columns.is_lexsorted() # compare the results tm.assert_frame_equal(lexsorted_df, not_lexsorted_df) expected = lexsorted_df.drop('a', axis=1) with tm.assert_produces_warning(PerformanceWarning): result = not_lexsorted_df.drop('a', axis=1) tm.assert_frame_equal(result, expected) def test_merge_join_different_levels(self): # GH 9455 # first dataframe df1 = DataFrame(columns=['a', 'b'], data=[[1, 11], [0, 22]]) # second dataframe columns = MultiIndex.from_tuples([('a', ''), ('c', 'c1')]) df2 = DataFrame(columns=columns, data=[[1, 33], [0, 44]]) # merge columns = ['a', 'b', ('c', 'c1')] expected = DataFrame(columns=columns, data=[[1, 11, 33], [0, 22, 44]]) with tm.assert_produces_warning(UserWarning): result = pd.merge(df1, df2, on='a') tm.assert_frame_equal(result, expected) # join, see discussion in GH 12219 columns = ['a', 'b', ('a', ''), ('c', 'c1')] expected = DataFrame(columns=columns, data=[[1, 11, 0, 44], [0, 22, 1, 33]]) with tm.assert_produces_warning(UserWarning): result = df1.join(df2, on='a') tm.assert_frame_equal(result, expected) def test_reindex(self): newFrame = self.frame.reindex(self.ts1.index) for col in newFrame.columns: for idx, val in compat.iteritems(newFrame[col]): if idx in self.frame.index: if np.isnan(val): assert np.isnan(self.frame[col][idx]) else: assert val == self.frame[col][idx] else: assert np.isnan(val) for col, series in compat.iteritems(newFrame): assert tm.equalContents(series.index, newFrame.index) emptyFrame = self.frame.reindex(Index([])) assert len(emptyFrame.index) == 0 # Cython code should be unit-tested directly nonContigFrame = self.frame.reindex(self.ts1.index[::2]) for col in nonContigFrame.columns: for idx, val in compat.iteritems(nonContigFrame[col]): if idx in self.frame.index: if np.isnan(val): assert np.isnan(self.frame[col][idx]) else: assert val == self.frame[col][idx] else: assert np.isnan(val) for col, series in compat.iteritems(nonContigFrame): assert tm.equalContents(series.index, nonContigFrame.index) # corner cases # Same index, copies values but not index if copy=False newFrame = self.frame.reindex(self.frame.index, copy=False) assert newFrame.index is self.frame.index # length zero newFrame = self.frame.reindex([]) assert newFrame.empty assert len(newFrame.columns) == 
len(self.frame.columns) # length zero with columns reindexed with non-empty index newFrame = self.frame.reindex([]) newFrame = newFrame.reindex(self.frame.index) assert len(newFrame.index) == len(self.frame.index) assert len(newFrame.columns) == len(self.frame.columns) # pass non-Index newFrame = self.frame.reindex(list(self.ts1.index)) tm.assert_index_equal(newFrame.index, self.ts1.index) # copy with no axes result = self.frame.reindex() assert_frame_equal(result, self.frame) assert result is not self.frame def test_reindex_nan(self): df = pd.DataFrame([[1, 2], [3, 5], [7, 11], [9, 23]], index=[2, np.nan, 1, 5], columns=['joe', 'jim']) i, j = [np.nan, 5, 5, np.nan, 1, 2, np.nan], [1, 3, 3, 1, 2, 0, 1] assert_frame_equal(df.reindex(i), df.iloc[j]) df.index = df.index.astype('object') assert_frame_equal(df.reindex(i), df.iloc[j], check_index_type=False) # GH10388 df = pd.DataFrame({'other': ['a', 'b', np.nan, 'c'], 'date': ['2015-03-22', np.nan, '2012-01-08', np.nan], 'amount': [2, 3, 4, 5]}) df['date'] = pd.to_datetime(df.date) df['delta'] = (pd.to_datetime('2015-06-18') - df['date']).shift(1) left = df.set_index(['delta', 'other', 'date']).reset_index() right = df.reindex(columns=['delta', 'other', 'date', 'amount']) assert_frame_equal(left, right) def test_reindex_name_remains(self): s = Series(random.rand(10)) df = DataFrame(s, index=np.arange(len(s))) i = Series(np.arange(10), name='iname') df = df.reindex(i) assert df.index.name == 'iname' df = df.reindex(Index(np.arange(10), name='tmpname')) assert df.index.name == 'tmpname' s = Series(random.rand(10)) df = DataFrame(s.T, index=np.arange(len(s))) i = Series(np.arange(10), name='iname') df = df.reindex(columns=i) assert df.columns.name == 'iname' def test_reindex_int(self): smaller = self.intframe.reindex(self.intframe.index[::2]) assert smaller['A'].dtype == np.int64 bigger = smaller.reindex(self.intframe.index) assert bigger['A'].dtype == np.float64 smaller = self.intframe.reindex(columns=['A', 'B']) assert smaller['A'].dtype == np.int64 def test_reindex_like(self): other = self.frame.reindex(index=self.frame.index[:10], columns=['C', 'B']) assert_frame_equal(other, self.frame.reindex_like(other)) def test_reindex_columns(self): new_frame = self.frame.reindex(columns=['A', 'B', 'E']) tm.assert_series_equal(new_frame['B'], self.frame['B']) assert np.isnan(new_frame['E']).all() assert 'C' not in new_frame # Length zero new_frame = self.frame.reindex(columns=[]) assert new_frame.empty def test_reindex_columns_method(self): # GH 14992, reindexing over columns ignored method df = DataFrame(data=[[11, 12, 13], [21, 22, 23], [31, 32, 33]], index=[1, 2, 4], columns=[1, 2, 4], dtype=float) # default method result = df.reindex(columns=range(6)) expected = DataFrame(data=[[np.nan, 11, 12, np.nan, 13, np.nan], [np.nan, 21, 22, np.nan, 23, np.nan], [np.nan, 31, 32, np.nan, 33, np.nan]], index=[1, 2, 4], columns=range(6), dtype=float) assert_frame_equal(result, expected) # method='ffill' result = df.reindex(columns=range(6), method='ffill') expected = DataFrame(data=[[np.nan, 11, 12, 12, 13, 13], [np.nan, 21, 22, 22, 23, 23], [np.nan, 31, 32, 32, 33, 33]], index=[1, 2, 4], columns=range(6), dtype=float) assert_frame_equal(result, expected) # method='bfill' result = df.reindex(columns=range(6), method='bfill') expected = DataFrame(data=[[11, 11, 12, 13, 13, np.nan], [21, 21, 22, 23, 23, np.nan], [31, 31, 32, 33, 33, np.nan]], index=[1, 2, 4], columns=range(6), dtype=float) assert_frame_equal(result, expected) def test_reindex_axes(self): # GH 
3317, reindexing by both axes loses freq of the index df = DataFrame(np.ones((3, 3)), index=[datetime(2012, 1, 1), datetime(2012, 1, 2), datetime(2012, 1, 3)], columns=['a', 'b', 'c']) time_freq = date_range('2012-01-01', '2012-01-03', freq='d') some_cols = ['a', 'b'] index_freq = df.reindex(index=time_freq).index.freq both_freq = df.reindex(index=time_freq, columns=some_cols).index.freq seq_freq = df.reindex(index=time_freq).reindex( columns=some_cols).index.freq assert index_freq == both_freq assert index_freq == seq_freq def test_reindex_fill_value(self): df = DataFrame(np.random.randn(10, 4)) # axis=0 result = df.reindex(lrange(15)) assert np.isnan(result.values[-5:]).all() result = df.reindex(lrange(15), fill_value=0) expected = df.reindex(lrange(15)).fillna(0) assert_frame_equal(result, expected) # axis=1 result = df.reindex(columns=lrange(5), fill_value=0.) expected = df.copy() expected[4] = 0. assert_frame_equal(result, expected) result = df.reindex(columns=lrange(5), fill_value=0) expected = df.copy() expected[4] = 0 assert_frame_equal(result, expected) result = df.reindex(columns=lrange(5), fill_value='foo') expected = df.copy() expected[4] = 'foo' assert_frame_equal(result, expected) # reindex_axis result = df.reindex_axis(lrange(15), fill_value=0., axis=0) expected = df.reindex(lrange(15)).fillna(0) assert_frame_equal(result, expected) result = df.reindex_axis(lrange(5), fill_value=0., axis=1) expected = df.reindex(columns=lrange(5)).fillna(0) assert_frame_equal(result, expected) # other dtypes df['foo'] = 'foo' result = df.reindex(lrange(15), fill_value=0) expected = df.reindex(lrange(15)).fillna(0) assert_frame_equal(result, expected) def test_reindex_dups(self): # GH4746, reindex on duplicate index error messages arr = np.random.randn(10) df = DataFrame(arr, index=[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]) # set index is ok result = df.copy() result.index = list(range(len(df))) expected = DataFrame(arr, index=list(range(len(df)))) assert_frame_equal(result, expected) # reindex fails pytest.raises(ValueError, df.reindex, index=list(range(len(df)))) def test_align(self): af, bf = self.frame.align(self.frame) assert af._data is not self.frame._data af, bf = self.frame.align(self.frame, copy=False) assert af._data is self.frame._data # axis = 0 other = self.frame.iloc[:-5, :3] af, bf = self.frame.align(other, axis=0, fill_value=-1) tm.assert_index_equal(bf.columns, other.columns) # test fill value join_idx = self.frame.index.join(other.index) diff_a = self.frame.index.difference(join_idx) diff_b = other.index.difference(join_idx) diff_a_vals = af.reindex(diff_a).values diff_b_vals = bf.reindex(diff_b).values assert (diff_a_vals == -1).all() af, bf = self.frame.align(other, join='right', axis=0) tm.assert_index_equal(bf.columns, other.columns) tm.assert_index_equal(bf.index, other.index) tm.assert_index_equal(af.index, other.index) # axis = 1 other = self.frame.iloc[:-5, :3].copy() af, bf = self.frame.align(other, axis=1) tm.assert_index_equal(bf.columns, self.frame.columns) tm.assert_index_equal(bf.index, other.index) # test fill value join_idx = self.frame.index.join(other.index) diff_a = self.frame.index.difference(join_idx) diff_b = other.index.difference(join_idx) diff_a_vals = af.reindex(diff_a).values # TODO(wesm): unused? 
diff_b_vals = bf.reindex(diff_b).values # noqa assert (diff_a_vals == -1).all() af, bf = self.frame.align(other, join='inner', axis=1) tm.assert_index_equal(bf.columns, other.columns) af, bf = self.frame.align(other, join='inner', axis=1, method='pad') tm.assert_index_equal(bf.columns, other.columns) # test other non-float types af, bf = self.intframe.align(other, join='inner', axis=1, method='pad') tm.assert_index_equal(bf.columns, other.columns) af, bf = self.mixed_frame.align(self.mixed_frame, join='inner', axis=1, method='pad') tm.assert_index_equal(bf.columns, self.mixed_frame.columns) af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1, method=None, fill_value=None) tm.assert_index_equal(bf.index, Index([])) af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1, method=None, fill_value=0) tm.assert_index_equal(bf.index, Index([])) # mixed floats/ints af, bf = self.mixed_float.align(other.iloc[:, 0], join='inner', axis=1, method=None, fill_value=0) tm.assert_index_equal(bf.index, Index([])) af, bf = self.mixed_int.align(other.iloc[:, 0], join='inner', axis=1, method=None, fill_value=0) tm.assert_index_equal(bf.index, Index([])) # Try to align DataFrame to Series along bad axis with pytest.raises(ValueError): self.frame.align(af.iloc[0, :3], join='inner', axis=2) # align dataframe to series with broadcast or not idx = self.frame.index s = Series(range(len(idx)), index=idx) left, right = self.frame.align(s, axis=0) tm.assert_index_equal(left.index, self.frame.index) tm.assert_index_equal(right.index, self.frame.index) assert isinstance(right, Series) left, right = self.frame.align(s, broadcast_axis=1) tm.assert_index_equal(left.index, self.frame.index) expected = {} for c in self.frame.columns: expected[c] = s expected = DataFrame(expected, index=self.frame.index, columns=self.frame.columns) tm.assert_frame_equal(right, expected) # see gh-9558 df = DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]}) result = df[df['a'] == 2] expected = DataFrame([[2, 5]], index=[1], columns=['a', 'b']) tm.assert_frame_equal(result, expected) result = df.where(df['a'] == 2, 0) expected = DataFrame({'a': [0, 2, 0], 'b': [0, 5, 0]}) tm.assert_frame_equal(result, expected) def _check_align(self, a, b, axis, fill_axis, how, method, limit=None): aa, ab = a.align(b, axis=axis, join=how, method=method, limit=limit, fill_axis=fill_axis) join_index, join_columns = None, None ea, eb = a, b if axis is None or axis == 0: join_index = a.index.join(b.index, how=how) ea = ea.reindex(index=join_index) eb = eb.reindex(index=join_index) if axis is None or axis == 1: join_columns = a.columns.join(b.columns, how=how) ea = ea.reindex(columns=join_columns) eb = eb.reindex(columns=join_columns) ea = ea.fillna(axis=fill_axis, method=method, limit=limit) eb = eb.fillna(axis=fill_axis, method=method, limit=limit) assert_frame_equal(aa, ea) assert_frame_equal(ab, eb) def test_align_fill_method_inner(self): for meth in ['pad', 'bfill']: for ax in [0, 1, None]: for fax in [0, 1]: self._check_align_fill('inner', meth, ax, fax) def test_align_fill_method_outer(self): for meth in ['pad', 'bfill']: for ax in [0, 1, None]: for fax in [0, 1]: self._check_align_fill('outer', meth, ax, fax) def test_align_fill_method_left(self): for meth in ['pad', 'bfill']: for ax in [0, 1, None]: for fax in [0, 1]: self._check_align_fill('left', meth, ax, fax) def test_align_fill_method_right(self): for meth in ['pad', 'bfill']: for ax in [0, 1, None]: for fax in [0, 1]: self._check_align_fill('right', meth, ax, fax) def 
_check_align_fill(self, kind, meth, ax, fax): left = self.frame.iloc[0:4, :10] right = self.frame.iloc[2:, 6:] empty = self.frame.iloc[:0, :0] self._check_align(left, right, axis=ax, fill_axis=fax, how=kind, method=meth) self._check_align(left, right, axis=ax, fill_axis=fax, how=kind, method=meth, limit=1) # empty left self._check_align(empty, right, axis=ax, fill_axis=fax, how=kind, method=meth) self._check_align(empty, right, axis=ax, fill_axis=fax, how=kind, method=meth, limit=1) # empty right self._check_align(left, empty, axis=ax, fill_axis=fax, how=kind, method=meth) self._check_align(left, empty, axis=ax, fill_axis=fax, how=kind, method=meth, limit=1) # both empty self._check_align(empty, empty, axis=ax, fill_axis=fax, how=kind, method=meth) self._check_align(empty, empty, axis=ax, fill_axis=fax, how=kind, method=meth, limit=1) def test_align_int_fill_bug(self): # GH #910 X = np.arange(10 * 10, dtype='float64').reshape(10, 10) Y = np.ones((10, 1), dtype=int) df1 = DataFrame(X) df1['0.X'] = Y.squeeze() df2 = df1.astype(float) result = df1 - df1.mean() expected = df2 - df2.mean() assert_frame_equal(result, expected) def test_align_multiindex(self): # GH 10665 # same test cases as test_align_multiindex in test_series.py midx = pd.MultiIndex.from_product([range(2), range(3), range(2)], names=('a', 'b', 'c')) idx = pd.Index(range(2), name='b') df1 = pd.DataFrame(np.arange(12, dtype='int64'), index=midx) df2 = pd.DataFrame(np.arange(2, dtype='int64'), index=idx) # these must be the same results (but flipped) res1l, res1r = df1.align(df2, join='left') res2l, res2r = df2.align(df1, join='right') expl = df1 assert_frame_equal(expl, res1l) assert_frame_equal(expl, res2r) expr = pd.DataFrame([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx) assert_frame_equal(expr, res1r) assert_frame_equal(expr, res2l) res1l, res1r = df1.align(df2, join='right') res2l, res2r = df2.align(df1, join='left') exp_idx = pd.MultiIndex.from_product([range(2), range(2), range(2)], names=('a', 'b', 'c')) expl = pd.DataFrame([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx) assert_frame_equal(expl, res1l) assert_frame_equal(expl, res2r) expr = pd.DataFrame([0, 0, 1, 1] * 2, index=exp_idx) assert_frame_equal(expr, res1r) assert_frame_equal(expr, res2l) def test_align_series_combinations(self): df = pd.DataFrame({'a': [1, 3, 5], 'b': [1, 3, 5]}, index=list('ACE')) s = pd.Series([1, 2, 4], index=list('ABD'), name='x') # frame + series res1, res2 = df.align(s, axis=0) exp1 = pd.DataFrame({'a': [1, np.nan, 3, np.nan, 5], 'b': [1, np.nan, 3, np.nan, 5]}, index=list('ABCDE')) exp2 = pd.Series([1, 2, np.nan, 4, np.nan], index=list('ABCDE'), name='x') tm.assert_frame_equal(res1, exp1) tm.assert_series_equal(res2, exp2) # series + frame res1, res2 = s.align(df) tm.assert_series_equal(res1, exp2) tm.assert_frame_equal(res2, exp1) def test_filter(self): # Items filtered = self.frame.filter(['A', 'B', 'E']) assert len(filtered.columns) == 2 assert 'E' not in filtered filtered = self.frame.filter(['A', 'B', 'E'], axis='columns') assert len(filtered.columns) == 2 assert 'E' not in filtered # Other axis idx = self.frame.index[0:4] filtered = self.frame.filter(idx, axis='index') expected = self.frame.reindex(index=idx) tm.assert_frame_equal(filtered, expected) # like fcopy = self.frame.copy() fcopy['AA'] = 1 filtered = fcopy.filter(like='A') assert len(filtered.columns) == 2 assert 'AA' in filtered # like with ints in column names df = DataFrame(0., index=[0, 1, 2], columns=[0, 1, '_A', '_B']) filtered = df.filter(like='_') assert 
len(filtered.columns) == 2 # regex with ints in column names # from PR #10384 df = DataFrame(0., index=[0, 1, 2], columns=['A1', 1, 'B', 2, 'C']) expected = DataFrame( 0., index=[0, 1, 2], columns=pd.Index([1, 2], dtype=object)) filtered = df.filter(regex='^[0-9]+$') tm.assert_frame_equal(filtered, expected) expected = DataFrame(0., index=[0, 1, 2], columns=[0, '0', 1, '1']) # shouldn't remove anything filtered = expected.filter(regex='^[0-9]+$') tm.assert_frame_equal(filtered, expected) # pass in None with tm.assert_raises_regex(TypeError, 'Must pass'): self.frame.filter() with tm.assert_raises_regex(TypeError, 'Must pass'): self.frame.filter(items=None) with tm.assert_raises_regex(TypeError, 'Must pass'): self.frame.filter(axis=1) # test mutually exclusive arguments with tm.assert_raises_regex(TypeError, 'mutually exclusive'): self.frame.filter(items=['one', 'three'], regex='e$', like='bbi') with tm.assert_raises_regex(TypeError, 'mutually exclusive'): self.frame.filter(items=['one', 'three'], regex='e$', axis=1) with tm.assert_raises_regex(TypeError, 'mutually exclusive'): self.frame.filter(items=['one', 'three'], regex='e$') with tm.assert_raises_regex(TypeError, 'mutually exclusive'): self.frame.filter(items=['one', 'three'], like='bbi', axis=0) with tm.assert_raises_regex(TypeError, 'mutually exclusive'): self.frame.filter(items=['one', 'three'], like='bbi') # objects filtered = self.mixed_frame.filter(like='foo') assert 'foo' in filtered # unicode columns, won't ascii-encode df = self.frame.rename(columns={'B': u('\u2202')}) filtered = df.filter(like='C') assert 'C' in filtered def test_filter_regex_search(self): fcopy = self.frame.copy() fcopy['AA'] = 1 # regex filtered = fcopy.filter(regex='[A]+') assert len(filtered.columns) == 2 assert 'AA' in filtered # doesn't have to be at beginning df = DataFrame({'aBBa': [1, 2], 'BBaBB': [1, 2], 'aCCa': [1, 2], 'aCCaBB': [1, 2]}) result = df.filter(regex='BB') exp = df[[x for x in df.columns if 'BB' in x]] assert_frame_equal(result, exp) def test_filter_corner(self): empty = DataFrame() result = empty.filter([]) assert_frame_equal(result, empty) result = empty.filter(like='foo') assert_frame_equal(result, empty) def test_select(self): f = lambda x: x.weekday() == 2 result = self.tsframe.select(f, axis=0) expected = self.tsframe.reindex( index=self.tsframe.index[[f(x) for x in self.tsframe.index]]) assert_frame_equal(result, expected) result = self.frame.select(lambda x: x in ('B', 'D'), axis=1) expected = self.frame.reindex(columns=['B', 'D']) # TODO should reindex check_names? 
assert_frame_equal(result, expected, check_names=False) def test_take(self): # homogeneous order = [3, 1, 2, 0] for df in [self.frame]: result = df.take(order, axis=0) expected = df.reindex(df.index.take(order)) assert_frame_equal(result, expected) # axis = 1 result = df.take(order, axis=1) expected = df.loc[:, ['D', 'B', 'C', 'A']] assert_frame_equal(result, expected, check_names=False) # neg indicies order = [2, 1, -1] for df in [self.frame]: result = df.take(order, axis=0) expected = df.reindex(df.index.take(order)) assert_frame_equal(result, expected) # axis = 1 result = df.take(order, axis=1) expected = df.loc[:, ['C', 'B', 'D']] assert_frame_equal(result, expected, check_names=False) # illegal indices pytest.raises(IndexError, df.take, [3, 1, 2, 30], axis=0) pytest.raises(IndexError, df.take, [3, 1, 2, -31], axis=0) pytest.raises(IndexError, df.take, [3, 1, 2, 5], axis=1) pytest.raises(IndexError, df.take, [3, 1, 2, -5], axis=1) # mixed-dtype order = [4, 1, 2, 0, 3] for df in [self.mixed_frame]: result = df.take(order, axis=0) expected = df.reindex(df.index.take(order)) assert_frame_equal(result, expected) # axis = 1 result = df.take(order, axis=1) expected = df.loc[:, ['foo', 'B', 'C', 'A', 'D']] assert_frame_equal(result, expected) # neg indicies order = [4, 1, -2] for df in [self.mixed_frame]: result = df.take(order, axis=0) expected = df.reindex(df.index.take(order)) assert_frame_equal(result, expected) # axis = 1 result = df.take(order, axis=1) expected = df.loc[:, ['foo', 'B', 'D']] assert_frame_equal(result, expected) # by dtype order = [1, 2, 0, 3] for df in [self.mixed_float, self.mixed_int]: result = df.take(order, axis=0) expected = df.reindex(df.index.take(order)) assert_frame_equal(result, expected) # axis = 1 result = df.take(order, axis=1) expected = df.loc[:, ['B', 'C', 'A', 'D']] assert_frame_equal(result, expected) def test_reindex_boolean(self): frame = DataFrame(np.ones((10, 2), dtype=bool), index=np.arange(0, 20, 2), columns=[0, 2]) reindexed = frame.reindex(np.arange(10)) assert reindexed.values.dtype == np.object_ assert isna(reindexed[0][1]) reindexed = frame.reindex(columns=lrange(3)) assert reindexed.values.dtype == np.object_ assert isna(reindexed[1]).all() def test_reindex_objects(self): reindexed = self.mixed_frame.reindex(columns=['foo', 'A', 'B']) assert 'foo' in reindexed reindexed = self.mixed_frame.reindex(columns=['A', 'B']) assert 'foo' not in reindexed def test_reindex_corner(self): index = Index(['a', 'b', 'c']) dm = self.empty.reindex(index=[1, 2, 3]) reindexed = dm.reindex(columns=index) tm.assert_index_equal(reindexed.columns, index) # ints are weird smaller = self.intframe.reindex(columns=['A', 'B', 'E']) assert smaller['E'].dtype == np.float64 def test_reindex_axis(self): cols = ['A', 'B', 'E'] reindexed1 = self.intframe.reindex_axis(cols, axis=1) reindexed2 = self.intframe.reindex(columns=cols) assert_frame_equal(reindexed1, reindexed2) rows = self.intframe.index[0:5] reindexed1 = self.intframe.reindex_axis(rows, axis=0) reindexed2 = self.intframe.reindex(index=rows) assert_frame_equal(reindexed1, reindexed2) pytest.raises(ValueError, self.intframe.reindex_axis, rows, axis=2) # no-op case cols = self.frame.columns.copy() newFrame = self.frame.reindex_axis(cols, axis=1) assert_frame_equal(newFrame, self.frame) def test_reindex_with_nans(self): df = DataFrame([[1, 2], [3, 4], [np.nan, np.nan], [7, 8], [9, 10]], columns=['a', 'b'], index=[100.0, 101.0, np.nan, 102.0, 103.0]) result = df.reindex(index=[101.0, 102.0, 103.0]) expected = 
df.iloc[[1, 3, 4]] assert_frame_equal(result, expected) result = df.reindex(index=[103.0]) expected = df.iloc[[4]] assert_frame_equal(result, expected) result = df.reindex(index=[101.0]) expected = df.iloc[[1]] assert_frame_equal(result, expected) def test_reindex_multi(self): df = DataFrame(np.random.randn(3, 3)) result = df.reindex(lrange(4), lrange(4)) expected = df.reindex(lrange(4)).reindex(columns=lrange(4)) assert_frame_equal(result, expected) df = DataFrame(np.random.randint(0, 10, (3, 3))) result = df.reindex(lrange(4), lrange(4)) expected = df.reindex(lrange(4)).reindex(columns=lrange(4)) assert_frame_equal(result, expected) df = DataFrame(np.random.randint(0, 10, (3, 3))) result = df.reindex(lrange(2), lrange(2)) expected = df.reindex(lrange(2)).reindex(columns=lrange(2)) assert_frame_equal(result, expected) df = DataFrame(np.random.randn(5, 3) + 1j, columns=['a', 'b', 'c']) result = df.reindex(index=[0, 1], columns=['a', 'b']) expected = df.reindex([0, 1]).reindex(columns=['a', 'b']) assert_frame_equal(result, expected)
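# --- Illustrative usage sketch (not part of the test file above) ---
# The tests above exercise DataFrame.reindex, align, filter and take. A small,
# hedged, self-contained sketch of the reindex behaviours they cover (new labels
# become NaN unless fill_value is given, and method='ffill' propagates along the
# axis); assumes only a standard pandas/numpy install.
import numpy as np
import pandas as pd

df = pd.DataFrame({'A': [1.0, 2.0, 3.0]}, index=[0, 1, 2])

print(df.reindex([0, 1, 2, 3]))                 # row 3 is NaN
print(df.reindex([0, 1, 2, 3], fill_value=0))   # row 3 is 0
print(df.reindex([0, 1, 2, 3], method='ffill')) # row 3 copies row 2

# align returns both frames reindexed onto the joined axis labels.
other = pd.DataFrame({'A': [10.0, 20.0]}, index=[1, 2])
left, right = df.align(other, join='outer', axis=0)
print(left.index.equals(right.index))           # True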
DGrady/pandas
pandas/tests/frame/test_axis_select_reindex.py
pandas/core/indexes/base.py
from __future__ import print_function, absolute_import, division from future.builtins import * from future import standard_library standard_library.install_aliases() # Copyright 2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import itertools import string import Bio.PDB import Bio.PDB.MMCIF2Dict import numpy as np import moldesign as mdt from moldesign import units as u from moldesign.helpers.pdb import BioAssembly from moldesign.utils import exports @exports def biopython_to_mol(struc): """Convert a biopython PDB structure to an MDT molecule. Note: Biopython doesn't deal with bond data, so no bonds will be present in the Molecule Args: struc (Bio.PDB.Structure.Structure): Biopython PDB structure to convert Returns: moldesign.Molecule: converted molecule """ # TODO: assign bonds using 1) CONECT records, 2) residue templates, 3) distance newatoms = [] backup_chain_names = list(string.ascii_uppercase) for chain in struc.get_chains(): tmp, pdbidx, pdbid = chain.get_full_id() if not pdbid.strip(): pdbid = backup_chain_names.pop() newchain = mdt.Chain(pdbname=pdbid.strip()) for residue in chain.get_residues(): newresidue = mdt.Residue(pdbname=residue.resname.strip(), pdbindex=residue.id[1]) newchain.add(newresidue) for atom in residue.get_atom(): elem = atom.element if len(elem) == 2: elem = elem[0] + elem[1].lower() newatom = mdt.Atom(element=elem, name=atom.get_name(), pdbname=atom.get_name(), pdbindex=atom.get_serial_number()) newatom.position = atom.coord * u.angstrom newresidue.add(newatom) newatoms.append(newatom) return mdt.Molecule(newatoms, name=struc.get_full_id()[0]) def get_mmcif_assemblies(fileobj=None, mmcdata=None): """Parse an mmCIF file, return biomolecular assembly specifications Args: fileobj (file-like): File-like object for the PDB file (this object will be rewound before returning) mmcdata (dict): dict version of complete mmCIF data structure (if passed, this will not be read again from fileobj) Returns: Mapping[str, BioAssembly]: dict mapping assembly ids to BioAssembly instances """ if mmcdata is None: mmcdata = get_mmcif_data(fileobj) if '_pdbx_struct_assembly.id' not in mmcdata: return {} # no assemblies present # Get assembly metadata ids = mmcdata['_pdbx_struct_assembly.id'] details = mmcdata['_pdbx_struct_assembly.details'] chains = mmcdata['_pdbx_struct_assembly_gen.asym_id_list'] opers = mmcdata['_pdbx_struct_assembly_gen.oper_expression'] transform_ids = mmcdata['_pdbx_struct_oper_list.id'] # Get matrix transformations tmat = np.zeros((4, 4)).tolist() for i in range(3): # construct displacement vector tmat[i][3] = mmcdata['_pdbx_struct_oper_list.vector[%d]' % (i+1)] for i, j in itertools.product(range(0, 3), range(0, 3)): # construct rotation matrix tmat[i][j] = mmcdata['_pdbx_struct_oper_list.matrix[%d][%d]' % (i+1, j+1)] transforms = _make_transform_dict(tmat, transform_ids) # Make sure it's a list if not isinstance(ids, list): ids = [ids] details = [details] chains = [chains] opers = [opers] # now create the assembly specifications assemblies = {} for id, 
detail, chainlist, operlist in zip(ids, details, chains, opers): assert id not in assemblies transforms = [transforms[i] for i in operlist.split(',')] assemblies[id] = BioAssembly(detail, chainlist.split(','), transforms) return assemblies def _make_transform_dict(tmat, transform_ids): if isinstance(transform_ids, list): for i, j in itertools.product(range(0, 3), range(0, 4)): tmat[i][j] = list(map(float, tmat[i][j])) tmat[3][3] = [1.0]*len(transform_ids) tmat[3][0] = tmat[3][1] = tmat[3][2] = [0.0]*len(transform_ids) tmat = np.array(tmat) transforms = {id: tmat[:, :, i] for i, id in enumerate(transform_ids)} else: for i, j in itertools.product(range(0, 4), range(0, 4)): tmat[i][j] = float(tmat[i][j]) tmat[3][3] = 1.0 tmat = np.array(tmat) transforms = {transform_ids: tmat} return transforms def get_mmcif_data(fileobj): mmcdata = Bio.PDB.MMCIF2Dict.MMCIF2Dict(fileobj) fileobj.seek(0) # rewind for future access return mmcdata
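# --- Illustrative sketch (not part of the source file above) ---
# _make_transform_dict above packs each assembly operator into a 4x4 homogeneous
# matrix: 3x3 rotation block, translation in the last column, bottom row
# [0, 0, 0, 1]. A hedged, numpy-only sketch of that layout and of applying it to
# a coordinate; the rotation/translation values here are placeholders, the real
# ones come from _pdbx_struct_oper_list entries in the mmCIF file.
import numpy as np

rotation = np.eye(3)
translation = np.array([1.0, 2.0, 3.0])

tmat = np.zeros((4, 4))
tmat[:3, :3] = rotation
tmat[:3, 3] = translation
tmat[3, 3] = 1.0

point = np.array([0.0, 0.0, 0.0, 1.0])    # homogeneous coordinate
print(tmat @ point)                        # -> [1. 2. 3. 1.]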
from __future__ import print_function import subprocess import os import pytest import moldesign as mdt from moldesign.external import pathlib __PYTEST_MARK__ = 'io' @pytest.fixture def example_path(tmpdir): path = pathlib.Path(str(tmpdir)) subprocess.check_call('python -m moldesign copyexamples', shell=True, cwd=str(path)) return path @pytest.mark.screening def test_exampled_copied(example_path): path = example_path assert (path / 'moldesign-examples').is_dir() with (path / 'moldesign-examples' / '.mdtversion').open('r') as verfile: assert verfile.read().strip() == mdt.__version__ def test_no_overwrite_examples(example_path): path = example_path try: subprocess.check_call('python -m moldesign copyexamples', shell=True, cwd=str(path)) except subprocess.CalledProcessError as err: assert err.returncode == 200 else: assert False, "Expected CalledProcessError" def test_example_version_warning(tmpdir): path = example_path(tmpdir) # call this directly because we mangle the test dir version with (path / 'moldesign-examples' / '.mdtversion').open('w') as verfile: verfile.write(u'0.1.0') try: subprocess.check_call('python -m moldesign copyexamples', shell=True, cwd=str(path)) except subprocess.CalledProcessError as err: assert err.returncode == 201 else: assert False, "Expected CalledProcessError" def test_version_command(): ver = subprocess.check_output('python -m moldesign version'.split()).splitlines()[-1] assert ver.decode('ascii') == mdt.__version__ def test_dumpenv_command(): # just test that it doesn't error subprocess.check_call('python -m moldesign dumpenv'.split()) def test_print_environment(): # just test that it still works mdt.data.print_environment() def test_config_command(tmpdir): tempconfpath = os.path.join(str(tmpdir), 'moldesign.yml') with open(tempconfpath, 'w') as tempfile: print('devmode: True\nx: y', file=tempfile) # just test that it doesn't error subprocess.check_call(['python', '-m', 'moldesign', '--config-file', tempconfpath, 'config'])
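# --- Illustrative sketch (not part of the test file above) ---
# The CLI tests above assert on specific process exit codes via
# subprocess.CalledProcessError. A generic, hedged sketch of that pattern; the
# command below is a POSIX-shell stand-in, not a real moldesign invocation.
import subprocess
import pytest

def run_and_expect_exit_code(cmd, expected_code):
    # check_call raises CalledProcessError for any non-zero exit status.
    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_call(cmd, shell=True)
    assert excinfo.value.returncode == expected_code

run_and_expect_exit_code('exit 3', 3)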
Autodesk/molecular-design-toolkit
moldesign/_tests/test_cli.py
moldesign/interfaces/biopython_interface.py
# This file is part of Indico. # Copyright (C) 2002 - 2020 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from indico.core.db import db from indico.util.string import format_repr, return_ascii RoomEquipmentAssociation = db.Table( 'room_equipment', db.metadata, db.Column( 'equipment_id', db.Integer, db.ForeignKey('roombooking.equipment_types.id'), primary_key=True, ), db.Column( 'room_id', db.Integer, db.ForeignKey('roombooking.rooms.id'), primary_key=True ), schema='roombooking' ) equipment_features_table = db.Table( 'equipment_features', db.metadata, db.Column( 'equipment_id', db.Integer, db.ForeignKey('roombooking.equipment_types.id'), primary_key=True, ), db.Column( 'feature_id', db.Integer, db.ForeignKey('roombooking.features.id'), primary_key=True ), schema='roombooking' ) class EquipmentType(db.Model): __tablename__ = 'equipment_types' __table_args__ = {'schema': 'roombooking'} id = db.Column( db.Integer, primary_key=True ) name = db.Column( db.String, nullable=False, index=True, unique=True ) features = db.relationship( 'RoomFeature', secondary=equipment_features_table, backref='equipment_types', lazy=True ) # relationship backrefs: # - rooms (Room.available_equipment) @return_ascii def __repr__(self): return format_repr(self, 'id', 'name')
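# --- Illustrative sketch (not part of the source file above) ---
# EquipmentType above links to RoomFeature through the equipment_features
# association table, a plain many-to-many. A minimal, generic SQLAlchemy sketch
# of the same pattern, independent of Indico's models and schema; assumes
# SQLAlchemy 1.4 or newer.
from sqlalchemy import Column, ForeignKey, Integer, String, Table, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

# Association table: composite primary key over the two foreign keys.
link = Table(
    'equipment_features', Base.metadata,
    Column('equipment_id', Integer, ForeignKey('equipment.id'), primary_key=True),
    Column('feature_id', Integer, ForeignKey('features.id'), primary_key=True),
)

class Feature(Base):
    __tablename__ = 'features'
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)

class Equipment(Base):
    __tablename__ = 'equipment'
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    # secondary= points at the association table; backref adds the reverse side.
    features = relationship('Feature', secondary=link, backref='equipment_types')

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    projector = Equipment(name='Projector', features=[Feature(name='presentation')])
    session.add(projector)
    session.commit()
    print([f.name for f in projector.features])   # ['presentation']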
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

from __future__ import unicode_literals

from datetime import date, datetime, time, timedelta

import pytest
import pytz

from indico.modules.rb import rb_settings
from indico.modules.rb.models.reservations import ReservationState
from indico.modules.rb.util import (get_booking_params_for_event, get_prebooking_collisions, rb_check_user_access,
                                    rb_is_admin)
from indico.testing.util import bool_matrix


pytest_plugins = ('indico.modules.rb.testing.fixtures', 'indico.modules.events.timetable.testing.fixtures')


@pytest.mark.parametrize(('is_rb_admin', 'acl_empty', 'in_acl', 'expected'), bool_matrix('...', expect=any))
def test_rb_check_user_access(db, mocker, dummy_user, dummy_group, is_rb_admin, acl_empty, in_acl, expected):
    if is_rb_admin:
        mocker.patch('indico.modules.rb.util.rb_is_admin', return_value=True)
    if not acl_empty:
        rb_settings.acls.add_principal('authorized_principals', dummy_group)
    if in_acl:
        rb_settings.acls.add_principal('authorized_principals', dummy_user)
    assert rb_check_user_access(dummy_user) == expected


@pytest.mark.parametrize(('is_admin', 'is_rb_admin', 'expected'), bool_matrix('..', expect=any))
def test_rb_is_admin(create_user, is_admin, is_rb_admin, expected):
    user = create_user(1, admin=is_admin, rb_admin=is_rb_admin)
    assert rb_is_admin(user) == expected


@pytest.mark.parametrize(('start_dt', 'end_dt', 'expected_params'), (
    # single-day event
    (datetime(2019, 8, 16, 10, 0), datetime(2019, 8, 16, 13, 0),
     {'recurrence': 'single', 'interval': 'week', 'number': 1,
      'sd': '2019-08-16', 'ed': None, 'st': '10:00', 'et': '13:00'}),
    # multi-day event
    (datetime(2019, 8, 16, 10, 0), datetime(2019, 8, 18, 13, 0),
     {'recurrence': 'daily', 'interval': 'week', 'number': 1,
      'sd': '2019-08-16', 'ed': '2019-08-18', 'st': '10:00', 'et': '13:00'}),
    # end time < start time
    (datetime(2019, 8, 16, 16, 0), datetime(2019, 8, 18, 13, 0),
     {'sd': '2019-08-16'})
))
def test_get_booking_params_for_event_same_times(create_event, dummy_room, start_dt, end_dt, expected_params):
    start_dt = pytz.utc.localize(start_dt)
    end_dt = pytz.utc.localize(end_dt)
    event = create_event(start_dt=start_dt, end_dt=end_dt, room=dummy_room)
    params = get_booking_params_for_event(event)
    assert params == {
        'type': 'same_times',
        'params': dict({
            'link_id': event.id,
            'link_type': 'event',
            'text': '#{}'.format(dummy_room.id),
        }, **expected_params)
    }


@pytest.mark.parametrize(('start_time', 'end_time', 'expected_params'), (
    # start time < end time
    (time(10), time(13),
     {'interval': 'week', 'number': 1, 'recurrence': 'single', 'st': '10:00', 'et': '13:00'}),
    # end time < start time
    (time(15), time(13), {}),
))
def test_get_booking_params_for_event_multiple_times(create_event, create_contribution, create_entry, dummy_room,
                                                     start_time, end_time, expected_params):
    start_dt = pytz.utc.localize(datetime.combine(date(2019, 8, 16), start_time))
    end_dt = pytz.utc.localize(datetime.combine(date(2019, 8, 18), end_time))
    event = create_event(start_dt=start_dt, end_dt=end_dt, room=dummy_room)
    c1 = create_contribution(event, 'C1', timedelta(minutes=30))
    c2 = create_contribution(event, 'C2', timedelta(minutes=120))
    c3 = create_contribution(event, 'C3', timedelta(minutes=30))
    create_entry(c1, pytz.utc.localize(datetime(2019, 8, 17, 9, 0)))
    create_entry(c2, pytz.utc.localize(datetime(2019, 8, 17, 18, 0)))
    create_entry(c3, pytz.utc.localize(datetime(2019, 8, 17, 19, 0)))
    params = get_booking_params_for_event(event)
    assert params == {
        'type': 'mixed_times',
        'params': {
            'link_type': 'event',
            'link_id': event.id,
            'text': '#{}'.format(dummy_room.id),
        },
        'time_info': [
            (date(2019, 8, 16), dict({'sd': '2019-08-16'}, **expected_params)),
            # this day has timetable entries -> not using the event defaults
            (date(2019, 8, 17), {'interval': 'week', 'number': 1, 'recurrence': 'single',
                                 'sd': '2019-08-17', 'st': '09:00', 'et': '20:00'}),
            (date(2019, 8, 18), dict({'sd': '2019-08-18'}, **expected_params))
        ]
    }


def test_get_booking_params_timezone(create_event):
    chicago_tz = pytz.timezone('America/Chicago')
    start_dt = chicago_tz.localize(datetime(2019, 8, 16, 8, 0)).astimezone(pytz.utc)
    end_dt = chicago_tz.localize(datetime(2019, 8, 18, 22, 0)).astimezone(pytz.utc)
    event = create_event(start_dt=start_dt, end_dt=end_dt, timezone='America/Chicago')
    assert get_booking_params_for_event(event) == {
        'type': 'same_times',
        'params': {
            'sd': '2019-08-16',
            'st': '08:00',
            'ed': '2019-08-18',
            'et': '22:00',
            'interval': 'week',
            'number': 1,
            'recurrence': 'daily',
            'link_id': event.id,
            'link_type': 'event',
            'text': None
        }
    }


def test_get_prebooking_collisions(create_reservation, dummy_user, freeze_time):
    freeze_time(datetime(2020, 3, 20, 12, 0, 0))
    start_dt = datetime(2020, 4, 1, 9, 0)
    end_dt = datetime(2020, 4, 1, 12, 0)
    res1 = create_reservation(start_dt=start_dt, end_dt=end_dt, state=ReservationState.pending)
    res2 = create_reservation(start_dt=start_dt, end_dt=end_dt, state=ReservationState.pending)
    create_reservation(start_dt=end_dt, end_dt=datetime(2020, 4, 1, 15, 0), state=ReservationState.pending)
    res_cancelled = create_reservation(start_dt=start_dt, end_dt=end_dt, state=ReservationState.pending)
    res_cancelled.cancel(dummy_user, silent=True)
    res_rejected = create_reservation(start_dt=start_dt, end_dt=end_dt, state=ReservationState.pending)
    res_rejected.reject(dummy_user, 'Testing', silent=True)
    collisions = get_prebooking_collisions(res1)
    assert collisions == [res2.occurrences.one()]
mic4ael/indico
indico/modules/rb/util_test.py
indico/modules/rb/models/equipment.py
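The row above pairs Indico's roombooking test module with the EquipmentType model. As a purely illustrative sketch (not part of the dataset), the snippet below shows how the model's many-to-many relationships could be traversed; it assumes an Indico application context with the roombooking tables populated.

from indico.modules.rb.models.equipment import EquipmentType

def rooms_with_equipment(equipment_name):
    """Return rooms whose available equipment includes `equipment_name`."""
    eq = EquipmentType.query.filter_by(name=equipment_name).one_or_none()
    # `rooms` is the backref created by Room.available_equipment
    # (see the "relationship backrefs" comment in the model above).
    return list(eq.rooms) if eq else []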
import os import genapi from genapi import \ TypeApi, GlobalVarApi, FunctionApi, BoolValuesApi import numpy_api # use annotated api when running under cpychecker h_template = r""" #if defined(_MULTIARRAYMODULE) || defined(WITH_CPYCHECKER_STEALS_REFERENCE_TO_ARG_ATTRIBUTE) typedef struct { PyObject_HEAD npy_bool obval; } PyBoolScalarObject; extern NPY_NO_EXPORT PyTypeObject PyArrayMapIter_Type; extern NPY_NO_EXPORT PyTypeObject PyArrayNeighborhoodIter_Type; extern NPY_NO_EXPORT PyBoolScalarObject _PyArrayScalar_BoolValues[2]; %s #else #if defined(PY_ARRAY_UNIQUE_SYMBOL) #define PyArray_API PY_ARRAY_UNIQUE_SYMBOL #endif #if defined(NO_IMPORT) || defined(NO_IMPORT_ARRAY) extern void **PyArray_API; #else #if defined(PY_ARRAY_UNIQUE_SYMBOL) void **PyArray_API; #else static void **PyArray_API=NULL; #endif #endif %s #if !defined(NO_IMPORT_ARRAY) && !defined(NO_IMPORT) static int _import_array(void) { int st; PyObject *numpy = PyImport_ImportModule("numpy.core._multiarray_umath"); PyObject *c_api = NULL; if (numpy == NULL) { return -1; } c_api = PyObject_GetAttrString(numpy, "_ARRAY_API"); Py_DECREF(numpy); if (c_api == NULL) { PyErr_SetString(PyExc_AttributeError, "_ARRAY_API not found"); return -1; } if (!PyCapsule_CheckExact(c_api)) { PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is not PyCapsule object"); Py_DECREF(c_api); return -1; } PyArray_API = (void **)PyCapsule_GetPointer(c_api, NULL); Py_DECREF(c_api); if (PyArray_API == NULL) { PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is NULL pointer"); return -1; } /* Perform runtime check of C API version */ if (NPY_VERSION != PyArray_GetNDArrayCVersion()) { PyErr_Format(PyExc_RuntimeError, "module compiled against "\ "ABI version 0x%%x but this version of numpy is 0x%%x", \ (int) NPY_VERSION, (int) PyArray_GetNDArrayCVersion()); return -1; } if (NPY_FEATURE_VERSION > PyArray_GetNDArrayCFeatureVersion()) { PyErr_Format(PyExc_RuntimeError, "module compiled against "\ "API version 0x%%x but this version of numpy is 0x%%x", \ (int) NPY_FEATURE_VERSION, (int) PyArray_GetNDArrayCFeatureVersion()); return -1; } /* * Perform runtime check of endianness and check it matches the one set by * the headers (npy_endian.h) as a safeguard */ st = PyArray_GetEndianness(); if (st == NPY_CPU_UNKNOWN_ENDIAN) { PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as unknown endian"); return -1; } #if NPY_BYTE_ORDER == NPY_BIG_ENDIAN if (st != NPY_CPU_BIG) { PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\ "big endian, but detected different endianness at runtime"); return -1; } #elif NPY_BYTE_ORDER == NPY_LITTLE_ENDIAN if (st != NPY_CPU_LITTLE) { PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\ "little endian, but detected different endianness at runtime"); return -1; } #endif return 0; } #define import_array() {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return NULL; } } #define import_array1(ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return ret; } } #define import_array2(msg, ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, msg); return ret; } } #endif #endif """ c_template = r""" /* These pointers will be stored in the C-object for use in other extension modules */ void *PyArray_API[] = { %s }; """ c_api_header = """ =========== NumPy C-API =========== """ def generate_api(output_dir, force=False): basename = 'multiarray_api' h_file = 
os.path.join(output_dir, '__%s.h' % basename) c_file = os.path.join(output_dir, '__%s.c' % basename) d_file = os.path.join(output_dir, '%s.txt' % basename) targets = (h_file, c_file, d_file) sources = numpy_api.multiarray_api if (not force and not genapi.should_rebuild(targets, [numpy_api.__file__, __file__])): return targets else: do_generate_api(targets, sources) return targets def do_generate_api(targets, sources): header_file = targets[0] c_file = targets[1] doc_file = targets[2] global_vars = sources[0] scalar_bool_values = sources[1] types_api = sources[2] multiarray_funcs = sources[3] multiarray_api = sources[:] module_list = [] extension_list = [] init_list = [] # Check multiarray api indexes multiarray_api_index = genapi.merge_api_dicts(multiarray_api) genapi.check_api_dict(multiarray_api_index) numpyapi_list = genapi.get_api_functions('NUMPY_API', multiarray_funcs) # FIXME: ordered_funcs_api is unused ordered_funcs_api = genapi.order_dict(multiarray_funcs) # Create dict name -> *Api instance api_name = 'PyArray_API' multiarray_api_dict = {} for f in numpyapi_list: name = f.name index = multiarray_funcs[name][0] annotations = multiarray_funcs[name][1:] multiarray_api_dict[f.name] = FunctionApi(f.name, index, annotations, f.return_type, f.args, api_name) for name, val in global_vars.items(): index, type = val multiarray_api_dict[name] = GlobalVarApi(name, index, type, api_name) for name, val in scalar_bool_values.items(): index = val[0] multiarray_api_dict[name] = BoolValuesApi(name, index, api_name) for name, val in types_api.items(): index = val[0] internal_type = None if len(val) == 1 else val[1] multiarray_api_dict[name] = TypeApi( name, index, 'PyTypeObject', api_name, internal_type) if len(multiarray_api_dict) != len(multiarray_api_index): keys_dict = set(multiarray_api_dict.keys()) keys_index = set(multiarray_api_index.keys()) raise AssertionError( "Multiarray API size mismatch - " "index has extra keys {}, dict has extra keys {}" .format(keys_index - keys_dict, keys_dict - keys_index) ) extension_list = [] for name, index in genapi.order_dict(multiarray_api_index): api_item = multiarray_api_dict[name] extension_list.append(api_item.define_from_array_api_string()) init_list.append(api_item.array_api_define()) module_list.append(api_item.internal_define()) # Write to header s = h_template % ('\n'.join(module_list), '\n'.join(extension_list)) genapi.write_file(header_file, s) # Write to c-code s = c_template % ',\n'.join(init_list) genapi.write_file(c_file, s) # write to documentation s = c_api_header for func in numpyapi_list: s += func.to_ReST() s += '\n\n' genapi.write_file(doc_file, s) return targets
import numpy as np import functools import sys import pytest from numpy.lib.shape_base import ( apply_along_axis, apply_over_axes, array_split, split, hsplit, dsplit, vsplit, dstack, column_stack, kron, tile, expand_dims, take_along_axis, put_along_axis ) from numpy.testing import ( assert_, assert_equal, assert_array_equal, assert_raises, assert_warns ) IS_64BIT = sys.maxsize > 2**32 def _add_keepdims(func): """ hack in keepdims behavior into a function taking an axis """ @functools.wraps(func) def wrapped(a, axis, **kwargs): res = func(a, axis=axis, **kwargs) if axis is None: axis = 0 # res is now a scalar, so we can insert this anywhere return np.expand_dims(res, axis=axis) return wrapped class TestTakeAlongAxis: def test_argequivalent(self): """ Test it translates from arg<func> to <func> """ from numpy.random import rand a = rand(3, 4, 5) funcs = [ (np.sort, np.argsort, dict()), (_add_keepdims(np.min), _add_keepdims(np.argmin), dict()), (_add_keepdims(np.max), _add_keepdims(np.argmax), dict()), (np.partition, np.argpartition, dict(kth=2)), ] for func, argfunc, kwargs in funcs: for axis in list(range(a.ndim)) + [None]: a_func = func(a, axis=axis, **kwargs) ai_func = argfunc(a, axis=axis, **kwargs) assert_equal(a_func, take_along_axis(a, ai_func, axis=axis)) def test_invalid(self): """ Test it errors when indices has too few dimensions """ a = np.ones((10, 10)) ai = np.ones((10, 2), dtype=np.intp) # sanity check take_along_axis(a, ai, axis=1) # not enough indices assert_raises(ValueError, take_along_axis, a, np.array(1), axis=1) # bool arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(bool), axis=1) # float arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(float), axis=1) # invalid axis assert_raises(np.AxisError, take_along_axis, a, ai, axis=10) def test_empty(self): """ Test everything is ok with empty results, even with inserted dims """ a = np.ones((3, 4, 5)) ai = np.ones((3, 0, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, ai.shape) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.ones((1, 2, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, (3, 2, 5)) class TestPutAlongAxis: def test_replace_max(self): a_base = np.array([[10, 30, 20], [60, 40, 50]]) for axis in list(range(a_base.ndim)) + [None]: # we mutate this in the loop a = a_base.copy() # replace the max with a small value i_max = _add_keepdims(np.argmax)(a, axis=axis) put_along_axis(a, i_max, -99, axis=axis) # find the new minimum, which should max i_min = _add_keepdims(np.argmin)(a, axis=axis) assert_equal(i_min, i_max) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.arange(10, dtype=np.intp).reshape((1, 2, 5)) % 4 put_along_axis(a, ai, 20, axis=1) assert_equal(take_along_axis(a, ai, axis=1), 20) class TestApplyAlongAxis: def test_simple(self): a = np.ones((20, 10), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_simple101(self): a = np.ones((10, 101), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_3d(self): a = np.arange(27).reshape((3, 3, 3)) assert_array_equal(apply_along_axis(np.sum, 0, a), [[27, 30, 33], [36, 39, 42], [45, 48, 51]]) def test_preserve_subclass(self): def double(row): return row * 2 class MyNDArray(np.ndarray): pass m = 
np.array([[0, 1], [2, 3]]).view(MyNDArray) expected = np.array([[0, 2], [4, 6]]).view(MyNDArray) result = apply_along_axis(double, 0, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) result = apply_along_axis(double, 1, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) def test_subclass(self): class MinimalSubclass(np.ndarray): data = 1 def minimal_function(array): return array.data a = np.zeros((6, 3)).view(MinimalSubclass) assert_array_equal( apply_along_axis(minimal_function, 0, a), np.array([1, 1, 1]) ) def test_scalar_array(self, cls=np.ndarray): a = np.ones((6, 3)).view(cls) res = apply_along_axis(np.sum, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) def test_0d_array(self, cls=np.ndarray): def sum_to_0d(x): """ Sum x, returning a 0d array of the same class """ assert_equal(x.ndim, 1) return np.squeeze(np.sum(x, keepdims=True)) a = np.ones((6, 3)).view(cls) res = apply_along_axis(sum_to_0d, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) res = apply_along_axis(sum_to_0d, 1, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([3, 3, 3, 3, 3, 3]).view(cls)) def test_axis_insertion(self, cls=np.ndarray): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) return (x[::-1] * x[1:,None]).view(cls) a2d = np.arange(6*3).reshape((6, 3)) # 2d insertion along first axis actual = apply_along_axis(f1to2, 0, a2d) expected = np.stack([ f1to2(a2d[:,i]) for i in range(a2d.shape[1]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 2d insertion along last axis actual = apply_along_axis(f1to2, 1, a2d) expected = np.stack([ f1to2(a2d[i,:]) for i in range(a2d.shape[0]) ], axis=0).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 3d insertion along middle axis a3d = np.arange(6*5*3).reshape((6, 5, 3)) actual = apply_along_axis(f1to2, 1, a3d) expected = np.stack([ np.stack([ f1to2(a3d[i,:,j]) for i in range(a3d.shape[0]) ], axis=0) for j in range(a3d.shape[2]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) def test_subclass_preservation(self): class MinimalSubclass(np.ndarray): pass self.test_scalar_array(MinimalSubclass) self.test_0d_array(MinimalSubclass) self.test_axis_insertion(MinimalSubclass) def test_axis_insertion_ma(self): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) res = x[::-1] * x[1:,None] return np.ma.masked_where(res%5==0, res) a = np.arange(6*3).reshape((6, 3)) res = apply_along_axis(f1to2, 0, a) assert_(isinstance(res, np.ma.masked_array)) assert_equal(res.ndim, 3) assert_array_equal(res[:,:,0].mask, f1to2(a[:,0]).mask) assert_array_equal(res[:,:,1].mask, f1to2(a[:,1]).mask) assert_array_equal(res[:,:,2].mask, f1to2(a[:,2]).mask) def test_tuple_func1d(self): def sample_1d(x): return x[1], x[0] res = np.apply_along_axis(sample_1d, 1, np.array([[1, 2], [3, 4]])) assert_array_equal(res, np.array([[2, 1], [4, 3]])) def test_empty(self): # can't apply_along_axis when there's no chance to call the function def never_call(x): assert_(False) # should never be reached a = np.empty((0, 0)) assert_raises(ValueError, np.apply_along_axis, never_call, 0, a) assert_raises(ValueError, np.apply_along_axis, never_call, 1, a) # but it's sometimes ok with some non-zero dimensions def empty_to_1(x): assert_(len(x) == 0) return 1 a = 
np.empty((10, 0)) actual = np.apply_along_axis(empty_to_1, 1, a) assert_equal(actual, np.ones(10)) assert_raises(ValueError, np.apply_along_axis, empty_to_1, 0, a) def test_with_iterable_object(self): # from issue 5248 d = np.array([ [{1, 11}, {2, 22}, {3, 33}], [{4, 44}, {5, 55}, {6, 66}] ]) actual = np.apply_along_axis(lambda a: set.union(*a), 0, d) expected = np.array([{1, 11, 4, 44}, {2, 22, 5, 55}, {3, 33, 6, 66}]) assert_equal(actual, expected) # issue 8642 - assert_equal doesn't detect this! for i in np.ndindex(actual.shape): assert_equal(type(actual[i]), type(expected[i])) class TestApplyOverAxes: def test_simple(self): a = np.arange(24).reshape(2, 3, 4) aoa_a = apply_over_axes(np.sum, a, [0, 2]) assert_array_equal(aoa_a, np.array([[[60], [92], [124]]])) class TestExpandDims: def test_functionality(self): s = (2, 3, 4, 5) a = np.empty(s) for axis in range(-5, 4): b = expand_dims(a, axis) assert_(b.shape[axis] == 1) assert_(np.squeeze(b).shape == s) def test_axis_tuple(self): a = np.empty((3, 3, 3)) assert np.expand_dims(a, axis=(0, 1, 2)).shape == (1, 1, 1, 3, 3, 3) assert np.expand_dims(a, axis=(0, -1, -2)).shape == (1, 3, 3, 3, 1, 1) assert np.expand_dims(a, axis=(0, 3, 5)).shape == (1, 3, 3, 1, 3, 1) assert np.expand_dims(a, axis=(0, -3, -5)).shape == (1, 1, 3, 1, 3, 3) def test_axis_out_of_range(self): s = (2, 3, 4, 5) a = np.empty(s) assert_raises(np.AxisError, expand_dims, a, -6) assert_raises(np.AxisError, expand_dims, a, 5) a = np.empty((3, 3, 3)) assert_raises(np.AxisError, expand_dims, a, (0, -6)) assert_raises(np.AxisError, expand_dims, a, (0, 5)) def test_repeated_axis(self): a = np.empty((3, 3, 3)) assert_raises(ValueError, expand_dims, a, axis=(1, 1)) def test_subclasses(self): a = np.arange(10).reshape((2, 5)) a = np.ma.array(a, mask=a%3 == 0) expanded = np.expand_dims(a, axis=1) assert_(isinstance(expanded, np.ma.MaskedArray)) assert_equal(expanded.shape, (2, 1, 5)) assert_equal(expanded.mask.shape, (2, 1, 5)) class TestArraySplit: def test_integer_0_split(self): a = np.arange(10) assert_raises(ValueError, array_split, a, 0) def test_integer_split(self): a = np.arange(10) res = array_split(a, 1) desired = [np.arange(10)] compare_results(res, desired) res = array_split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) res = array_split(a, 3) desired = [np.arange(4), np.arange(4, 7), np.arange(7, 10)] compare_results(res, desired) res = array_split(a, 4) desired = [np.arange(3), np.arange(3, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 5) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 6) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 7) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 8) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 9) desired = [np.arange(2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 10) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), 
np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 11) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10), np.array([])] compare_results(res, desired) def test_integer_split_2D_rows(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=0) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # Same thing for manual splits: res = array_split(a, [0, 1, 2], axis=0) tgt = [np.zeros((0, 10)), np.array([np.arange(10)]), np.array([np.arange(10)])] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) def test_integer_split_2D_cols(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=-1) desired = [np.array([np.arange(4), np.arange(4)]), np.array([np.arange(4, 7), np.arange(4, 7)]), np.array([np.arange(7, 10), np.arange(7, 10)])] compare_results(res, desired) def test_integer_split_2D_default(self): """ This will fail if we change default axis """ a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # perhaps should check higher dimensions @pytest.mark.skipif(not IS_64BIT, reason="Needs 64bit platform") def test_integer_split_2D_rows_greater_max_int32(self): a = np.broadcast_to([0], (1 << 32, 2)) res = array_split(a, 4) chunk = np.broadcast_to([0], (1 << 30, 2)) tgt = [chunk] * 4 for i in range(len(tgt)): assert_equal(res[i].shape, tgt[i].shape) def test_index_split_simple(self): a = np.arange(10) indices = [1, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.arange(0, 1), np.arange(1, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_low_bound(self): a = np.arange(10) indices = [0, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_high_bound(self): a = np.arange(10) indices = [0, 5, 7, 10, 12] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10), np.array([]), np.array([])] compare_results(res, desired) class TestSplit: # The split function is essentially the same as array_split, # except that it test if splitting will result in an # equal split. Only test for this case. 
def test_equal_split(self): a = np.arange(10) res = split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) def test_unequal_split(self): a = np.arange(10) assert_raises(ValueError, split, a, 3) class TestColumnStack: def test_non_iterable(self): assert_raises(TypeError, column_stack, 1) def test_1D_arrays(self): # example from docstring a = np.array((1, 2, 3)) b = np.array((2, 3, 4)) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_2D_arrays(self): # same as hstack 2D docstring example a = np.array([[1], [2], [3]]) b = np.array([[2], [3], [4]]) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_generator(self): with assert_warns(FutureWarning): column_stack((np.arange(3) for _ in range(2))) class TestDstack: def test_non_iterable(self): assert_raises(TypeError, dstack, 1) def test_0D_array(self): a = np.array(1) b = np.array(2) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_1D_array(self): a = np.array([1]) b = np.array([2]) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_2D_array(self): a = np.array([[1], [2]]) b = np.array([[1], [2]]) res = dstack([a, b]) desired = np.array([[[1, 1]], [[2, 2, ]]]) assert_array_equal(res, desired) def test_2D_array2(self): a = np.array([1, 2]) b = np.array([1, 2]) res = dstack([a, b]) desired = np.array([[[1, 1], [2, 2]]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): dstack((np.arange(3) for _ in range(2))) # array_split has more comprehensive test of splitting. # only do simple test on hsplit, vsplit, and dsplit class TestHsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, hsplit, 1, 1) def test_0D_array(self): a = np.array(1) try: hsplit(a, 2) assert_(0) except ValueError: pass def test_1D_array(self): a = np.array([1, 2, 3, 4]) res = hsplit(a, 2) desired = [np.array([1, 2]), np.array([3, 4])] compare_results(res, desired) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = hsplit(a, 2) desired = [np.array([[1, 2], [1, 2]]), np.array([[3, 4], [3, 4]])] compare_results(res, desired) class TestVsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, vsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, vsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) try: vsplit(a, 2) assert_(0) except ValueError: pass def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = vsplit(a, 2) desired = [np.array([[1, 2, 3, 4]]), np.array([[1, 2, 3, 4]])] compare_results(res, desired) class TestDsplit: # Only testing for integer splits. 
def test_non_iterable(self): assert_raises(ValueError, dsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, dsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) assert_raises(ValueError, dsplit, a, 2) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) try: dsplit(a, 2) assert_(0) except ValueError: pass def test_3D_array(self): a = np.array([[[1, 2, 3, 4], [1, 2, 3, 4]], [[1, 2, 3, 4], [1, 2, 3, 4]]]) res = dsplit(a, 2) desired = [np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]]), np.array([[[3, 4], [3, 4]], [[3, 4], [3, 4]]])] compare_results(res, desired) class TestSqueeze: def test_basic(self): from numpy.random import rand a = rand(20, 10, 10, 1, 1) b = rand(20, 1, 10, 1, 20) c = rand(1, 1, 20, 10) assert_array_equal(np.squeeze(a), np.reshape(a, (20, 10, 10))) assert_array_equal(np.squeeze(b), np.reshape(b, (20, 10, 20))) assert_array_equal(np.squeeze(c), np.reshape(c, (20, 10))) # Squeezing to 0-dim should still give an ndarray a = [[[1.5]]] res = np.squeeze(a) assert_equal(res, 1.5) assert_equal(res.ndim, 0) assert_equal(type(res), np.ndarray) class TestKron: def test_return_type(self): class myarray(np.ndarray): __array_priority__ = 0.0 a = np.ones([2, 2]) ma = myarray(a.shape, a.dtype, a.data) assert_equal(type(kron(a, a)), np.ndarray) assert_equal(type(kron(ma, ma)), myarray) assert_equal(type(kron(a, ma)), np.ndarray) assert_equal(type(kron(ma, a)), myarray) class TestTile: def test_basic(self): a = np.array([0, 1, 2]) b = [[1, 2], [3, 4]] assert_equal(tile(a, 2), [0, 1, 2, 0, 1, 2]) assert_equal(tile(a, (2, 2)), [[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]]) assert_equal(tile(a, (1, 2)), [[0, 1, 2, 0, 1, 2]]) assert_equal(tile(b, 2), [[1, 2, 1, 2], [3, 4, 3, 4]]) assert_equal(tile(b, (2, 1)), [[1, 2], [3, 4], [1, 2], [3, 4]]) assert_equal(tile(b, (2, 2)), [[1, 2, 1, 2], [3, 4, 3, 4], [1, 2, 1, 2], [3, 4, 3, 4]]) def test_tile_one_repetition_on_array_gh4679(self): a = np.arange(5) b = tile(a, 1) b += 2 assert_equal(a, np.arange(5)) def test_empty(self): a = np.array([[[]]]) b = np.array([[], []]) c = tile(b, 2).shape d = tile(a, (3, 2, 5)).shape assert_equal(c, (2, 0)) assert_equal(d, (3, 2, 0)) def test_kroncompare(self): from numpy.random import randint reps = [(2,), (1, 2), (2, 1), (2, 2), (2, 3, 2), (3, 2)] shape = [(3,), (2, 3), (3, 4, 3), (3, 2, 3), (4, 3, 2, 4), (2, 2)] for s in shape: b = randint(0, 10, size=s) for r in reps: a = np.ones(r, b.dtype) large = tile(b, r) klarge = kron(a, b) assert_equal(large, klarge) class TestMayShareMemory: def test_basic(self): d = np.ones((50, 60)) d2 = np.ones((30, 60, 6)) assert_(np.may_share_memory(d, d)) assert_(np.may_share_memory(d, d[::-1])) assert_(np.may_share_memory(d, d[::2])) assert_(np.may_share_memory(d, d[1:, ::-1])) assert_(not np.may_share_memory(d[::-1], d2)) assert_(not np.may_share_memory(d[::2], d2)) assert_(not np.may_share_memory(d[1:, ::-1], d2)) assert_(np.may_share_memory(d2[1:, ::-1], d2)) # Utility def compare_results(res, desired): for i in range(len(desired)): assert_array_equal(res[i], desired[i])
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/core/code_generators/generate_numpy_api.py
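The test file in the row above checks, in TestTakeAlongAxis.test_argequivalent, that feeding arg<func> indices to take_along_axis reproduces <func> itself. A minimal standalone sketch of that property (not part of the dataset row):

import numpy as np

rng = np.random.default_rng(0)
a = rng.random((3, 4, 5))
idx = np.argsort(a, axis=1)
# Applying take_along_axis to argsort indices reproduces a plain sort,
# which is the equivalence the test exercises for several functions.
assert np.array_equal(np.take_along_axis(a, idx, axis=1), np.sort(a, axis=1))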
""" The arraypad module contains a group of functions to pad values onto the edges of an n-dimensional array. """ import numpy as np from numpy.core.overrides import array_function_dispatch from numpy.lib.index_tricks import ndindex __all__ = ['pad'] ############################################################################### # Private utility functions. def _round_if_needed(arr, dtype): """ Rounds arr inplace if destination dtype is integer. Parameters ---------- arr : ndarray Input array. dtype : dtype The dtype of the destination array. """ if np.issubdtype(dtype, np.integer): arr.round(out=arr) def _slice_at_axis(sl, axis): """ Construct tuple of slices to slice an array in the given dimension. Parameters ---------- sl : slice The slice for the given dimension. axis : int The axis to which `sl` is applied. All other dimensions are left "unsliced". Returns ------- sl : tuple of slices A tuple with slices matching `shape` in length. Examples -------- >>> _slice_at_axis(slice(None, 3, -1), 1) (slice(None, None, None), slice(None, 3, -1), (...,)) """ return (slice(None),) * axis + (sl,) + (...,) def _view_roi(array, original_area_slice, axis): """ Get a view of the current region of interest during iterative padding. When padding multiple dimensions iteratively corner values are unnecessarily overwritten multiple times. This function reduces the working area for the first dimensions so that corners are excluded. Parameters ---------- array : ndarray The array with the region of interest. original_area_slice : tuple of slices Denotes the area with original values of the unpadded array. axis : int The currently padded dimension assuming that `axis` is padded before `axis` + 1. Returns ------- roi : ndarray The region of interest of the original `array`. """ axis += 1 sl = (slice(None),) * axis + original_area_slice[axis:] return array[sl] def _pad_simple(array, pad_width, fill_value=None): """ Pad array on all sides with either a single value or undefined values. Parameters ---------- array : ndarray Array to grow. pad_width : sequence of tuple[int, int] Pad width on both sides for each dimension in `arr`. fill_value : scalar, optional If provided the padded area is filled with this value, otherwise the pad area left undefined. Returns ------- padded : ndarray The padded array with the same dtype as`array`. Its order will default to C-style if `array` is not F-contiguous. original_area_slice : tuple A tuple of slices pointing to the area of the original array. """ # Allocate grown array new_shape = tuple( left + size + right for size, (left, right) in zip(array.shape, pad_width) ) order = 'F' if array.flags.fnc else 'C' # Fortran and not also C-order padded = np.empty(new_shape, dtype=array.dtype, order=order) if fill_value is not None: padded.fill(fill_value) # Copy old array into correct space original_area_slice = tuple( slice(left, left + size) for size, (left, right) in zip(array.shape, pad_width) ) padded[original_area_slice] = array return padded, original_area_slice def _set_pad_area(padded, axis, width_pair, value_pair): """ Set empty-padded area in given dimension. Parameters ---------- padded : ndarray Array with the pad area which is modified inplace. axis : int Dimension with the pad area to set. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. value_pair : tuple of scalars or ndarrays Values inserted into the pad area on each side. It must match or be broadcastable to the shape of `arr`. 
""" left_slice = _slice_at_axis(slice(None, width_pair[0]), axis) padded[left_slice] = value_pair[0] right_slice = _slice_at_axis( slice(padded.shape[axis] - width_pair[1], None), axis) padded[right_slice] = value_pair[1] def _get_edges(padded, axis, width_pair): """ Retrieve edge values from empty-padded array in given dimension. Parameters ---------- padded : ndarray Empty-padded array. axis : int Dimension in which the edges are considered. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. Returns ------- left_edge, right_edge : ndarray Edge values of the valid area in `padded` in the given dimension. Its shape will always match `padded` except for the dimension given by `axis` which will have a length of 1. """ left_index = width_pair[0] left_slice = _slice_at_axis(slice(left_index, left_index + 1), axis) left_edge = padded[left_slice] right_index = padded.shape[axis] - width_pair[1] right_slice = _slice_at_axis(slice(right_index - 1, right_index), axis) right_edge = padded[right_slice] return left_edge, right_edge def _get_linear_ramps(padded, axis, width_pair, end_value_pair): """ Construct linear ramps for empty-padded array in given dimension. Parameters ---------- padded : ndarray Empty-padded array. axis : int Dimension in which the ramps are constructed. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. end_value_pair : (scalar, scalar) End values for the linear ramps which form the edge of the fully padded array. These values are included in the linear ramps. Returns ------- left_ramp, right_ramp : ndarray Linear ramps to set on both sides of `padded`. """ edge_pair = _get_edges(padded, axis, width_pair) left_ramp, right_ramp = ( np.linspace( start=end_value, stop=edge.squeeze(axis), # Dimension is replaced by linspace num=width, endpoint=False, dtype=padded.dtype, axis=axis ) for end_value, edge, width in zip( end_value_pair, edge_pair, width_pair ) ) # Reverse linear space in appropriate dimension right_ramp = right_ramp[_slice_at_axis(slice(None, None, -1), axis)] return left_ramp, right_ramp def _get_stats(padded, axis, width_pair, length_pair, stat_func): """ Calculate statistic for the empty-padded array in given dimension. Parameters ---------- padded : ndarray Empty-padded array. axis : int Dimension in which the statistic is calculated. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. length_pair : 2-element sequence of None or int Gives the number of values in valid area from each side that is taken into account when calculating the statistic. If None the entire valid area in `padded` is considered. stat_func : function Function to compute statistic. The expected signature is ``stat_func(x: ndarray, axis: int, keepdims: bool) -> ndarray``. Returns ------- left_stat, right_stat : ndarray Calculated statistic for both sides of `padded`. 
""" # Calculate indices of the edges of the area with original values left_index = width_pair[0] right_index = padded.shape[axis] - width_pair[1] # as well as its length max_length = right_index - left_index # Limit stat_lengths to max_length left_length, right_length = length_pair if left_length is None or max_length < left_length: left_length = max_length if right_length is None or max_length < right_length: right_length = max_length if (left_length == 0 or right_length == 0) \ and stat_func in {np.amax, np.amin}: # amax and amin can't operate on an empty array, # raise a more descriptive warning here instead of the default one raise ValueError("stat_length of 0 yields no value for padding") # Calculate statistic for the left side left_slice = _slice_at_axis( slice(left_index, left_index + left_length), axis) left_chunk = padded[left_slice] left_stat = stat_func(left_chunk, axis=axis, keepdims=True) _round_if_needed(left_stat, padded.dtype) if left_length == right_length == max_length: # return early as right_stat must be identical to left_stat return left_stat, left_stat # Calculate statistic for the right side right_slice = _slice_at_axis( slice(right_index - right_length, right_index), axis) right_chunk = padded[right_slice] right_stat = stat_func(right_chunk, axis=axis, keepdims=True) _round_if_needed(right_stat, padded.dtype) return left_stat, right_stat def _set_reflect_both(padded, axis, width_pair, method, include_edge=False): """ Pad `axis` of `arr` with reflection. Parameters ---------- padded : ndarray Input array of arbitrary shape. axis : int Axis along which to pad `arr`. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. method : str Controls method of reflection; options are 'even' or 'odd'. include_edge : bool If true, edge value is included in reflection, otherwise the edge value forms the symmetric axis to the reflection. Returns ------- pad_amt : tuple of ints, length 2 New index positions of padding to do along the `axis`. If these are both 0, padding is done in this dimension. 
""" left_pad, right_pad = width_pair old_length = padded.shape[axis] - right_pad - left_pad if include_edge: # Edge is included, we need to offset the pad amount by 1 edge_offset = 1 else: edge_offset = 0 # Edge is not included, no need to offset pad amount old_length -= 1 # but must be omitted from the chunk if left_pad > 0: # Pad with reflected values on left side: # First limit chunk size which can't be larger than pad area chunk_length = min(old_length, left_pad) # Slice right to left, stop on or next to edge, start relative to stop stop = left_pad - edge_offset start = stop + chunk_length left_slice = _slice_at_axis(slice(start, stop, -1), axis) left_chunk = padded[left_slice] if method == "odd": # Negate chunk and align with edge edge_slice = _slice_at_axis(slice(left_pad, left_pad + 1), axis) left_chunk = 2 * padded[edge_slice] - left_chunk # Insert chunk into padded area start = left_pad - chunk_length stop = left_pad pad_area = _slice_at_axis(slice(start, stop), axis) padded[pad_area] = left_chunk # Adjust pointer to left edge for next iteration left_pad -= chunk_length if right_pad > 0: # Pad with reflected values on right side: # First limit chunk size which can't be larger than pad area chunk_length = min(old_length, right_pad) # Slice right to left, start on or next to edge, stop relative to start start = -right_pad + edge_offset - 2 stop = start - chunk_length right_slice = _slice_at_axis(slice(start, stop, -1), axis) right_chunk = padded[right_slice] if method == "odd": # Negate chunk and align with edge edge_slice = _slice_at_axis( slice(-right_pad - 1, -right_pad), axis) right_chunk = 2 * padded[edge_slice] - right_chunk # Insert chunk into padded area start = padded.shape[axis] - right_pad stop = start + chunk_length pad_area = _slice_at_axis(slice(start, stop), axis) padded[pad_area] = right_chunk # Adjust pointer to right edge for next iteration right_pad -= chunk_length return left_pad, right_pad def _set_wrap_both(padded, axis, width_pair): """ Pad `axis` of `arr` with wrapped values. Parameters ---------- padded : ndarray Input array of arbitrary shape. axis : int Axis along which to pad `arr`. width_pair : (int, int) Pair of widths that mark the pad area on both sides in the given dimension. Returns ------- pad_amt : tuple of ints, length 2 New index positions of padding to do along the `axis`. If these are both 0, padding is done in this dimension. """ left_pad, right_pad = width_pair period = padded.shape[axis] - right_pad - left_pad # If the current dimension of `arr` doesn't contain enough valid values # (not part of the undefined pad area) we need to pad multiple times. # Each time the pad area shrinks on both sides which is communicated with # these variables. new_left_pad = 0 new_right_pad = 0 if left_pad > 0: # Pad with wrapped values on left side # First slice chunk from right side of the non-pad area. # Use min(period, left_pad) to ensure that chunk is not larger than # pad area right_slice = _slice_at_axis( slice(-right_pad - min(period, left_pad), -right_pad if right_pad != 0 else None), axis ) right_chunk = padded[right_slice] if left_pad > period: # Chunk is smaller than pad area pad_area = _slice_at_axis(slice(left_pad - period, left_pad), axis) new_left_pad = left_pad - period else: # Chunk matches pad area pad_area = _slice_at_axis(slice(None, left_pad), axis) padded[pad_area] = right_chunk if right_pad > 0: # Pad with wrapped values on right side # First slice chunk from left side of the non-pad area. 
# Use min(period, right_pad) to ensure that chunk is not larger than # pad area left_slice = _slice_at_axis( slice(left_pad, left_pad + min(period, right_pad),), axis) left_chunk = padded[left_slice] if right_pad > period: # Chunk is smaller than pad area pad_area = _slice_at_axis( slice(-right_pad, -right_pad + period), axis) new_right_pad = right_pad - period else: # Chunk matches pad area pad_area = _slice_at_axis(slice(-right_pad, None), axis) padded[pad_area] = left_chunk return new_left_pad, new_right_pad def _as_pairs(x, ndim, as_index=False): """ Broadcast `x` to an array with the shape (`ndim`, 2). A helper function for `pad` that prepares and validates arguments like `pad_width` for iteration in pairs. Parameters ---------- x : {None, scalar, array-like} The object to broadcast to the shape (`ndim`, 2). ndim : int Number of pairs the broadcasted `x` will have. as_index : bool, optional If `x` is not None, try to round each element of `x` to an integer (dtype `np.intp`) and ensure every element is positive. Returns ------- pairs : nested iterables, shape (`ndim`, 2) The broadcasted version of `x`. Raises ------ ValueError If `as_index` is True and `x` contains negative elements. Or if `x` is not broadcastable to the shape (`ndim`, 2). """ if x is None: # Pass through None as a special case, otherwise np.round(x) fails # with an AttributeError return ((None, None),) * ndim x = np.array(x) if as_index: x = np.round(x).astype(np.intp, copy=False) if x.ndim < 3: # Optimization: Possibly use faster paths for cases where `x` has # only 1 or 2 elements. `np.broadcast_to` could handle these as well # but is currently slower if x.size == 1: # x was supplied as a single value x = x.ravel() # Ensure x[0] works for x.ndim == 0, 1, 2 if as_index and x < 0: raise ValueError("index can't contain negative values") return ((x[0], x[0]),) * ndim if x.size == 2 and x.shape != (2, 1): # x was supplied with a single value for each side # but except case when each dimension has a single value # which should be broadcasted to a pair, # e.g. [[1], [2]] -> [[1, 1], [2, 2]] not [[1, 2], [1, 2]] x = x.ravel() # Ensure x[0], x[1] works if as_index and (x[0] < 0 or x[1] < 0): raise ValueError("index can't contain negative values") return ((x[0], x[1]),) * ndim if as_index and x.min() < 0: raise ValueError("index can't contain negative values") # Converting the array with `tolist` seems to improve performance # when iterating and indexing the result (see usage in `pad`) return np.broadcast_to(x, (ndim, 2)).tolist() def _pad_dispatcher(array, pad_width, mode=None, **kwargs): return (array,) ############################################################################### # Public functions @array_function_dispatch(_pad_dispatcher, module='numpy') def pad(array, pad_width, mode='constant', **kwargs): """ Pad an array. Parameters ---------- array : array_like of rank N The array to pad. pad_width : {sequence, array_like, int} Number of values padded to the edges of each axis. ((before_1, after_1), ... (before_N, after_N)) unique pad widths for each axis. ((before, after),) yields same before and after pad for each axis. (pad,) or int is a shortcut for before = after = pad width for all axes. mode : str or function, optional One of the following string values or a user supplied function. 'constant' (default) Pads with a constant value. 'edge' Pads with the edge values of array. 'linear_ramp' Pads with the linear ramp between end_value and the array edge value. 
'maximum' Pads with the maximum value of all or part of the vector along each axis. 'mean' Pads with the mean value of all or part of the vector along each axis. 'median' Pads with the median value of all or part of the vector along each axis. 'minimum' Pads with the minimum value of all or part of the vector along each axis. 'reflect' Pads with the reflection of the vector mirrored on the first and last values of the vector along each axis. 'symmetric' Pads with the reflection of the vector mirrored along the edge of the array. 'wrap' Pads with the wrap of the vector along the axis. The first values are used to pad the end and the end values are used to pad the beginning. 'empty' Pads with undefined values. .. versionadded:: 1.17 <function> Padding function, see Notes. stat_length : sequence or int, optional Used in 'maximum', 'mean', 'median', and 'minimum'. Number of values at edge of each axis used to calculate the statistic value. ((before_1, after_1), ... (before_N, after_N)) unique statistic lengths for each axis. ((before, after),) yields same before and after statistic lengths for each axis. (stat_length,) or int is a shortcut for before = after = statistic length for all axes. Default is ``None``, to use the entire axis. constant_values : sequence or scalar, optional Used in 'constant'. The values to set the padded values for each axis. ``((before_1, after_1), ... (before_N, after_N))`` unique pad constants for each axis. ``((before, after),)`` yields same before and after constants for each axis. ``(constant,)`` or ``constant`` is a shortcut for ``before = after = constant`` for all axes. Default is 0. end_values : sequence or scalar, optional Used in 'linear_ramp'. The values used for the ending value of the linear_ramp and that will form the edge of the padded array. ``((before_1, after_1), ... (before_N, after_N))`` unique end values for each axis. ``((before, after),)`` yields same before and after end values for each axis. ``(constant,)`` or ``constant`` is a shortcut for ``before = after = constant`` for all axes. Default is 0. reflect_type : {'even', 'odd'}, optional Used in 'reflect', and 'symmetric'. The 'even' style is the default with an unaltered reflection around the edge value. For the 'odd' style, the extended part of the array is created by subtracting the reflected values from two times the edge value. Returns ------- pad : ndarray Padded array of rank equal to `array` with shape increased according to `pad_width`. Notes ----- .. versionadded:: 1.7.0 For an array with rank greater than 1, some of the padding of later axes is calculated from padding of previous axes. This is easiest to think about with a rank 2 array where the corners of the padded array are calculated by using padded values from the first axis. The padding function, if used, should modify a rank 1 array in-place. It has the following signature:: padding_func(vector, iaxis_pad_width, iaxis, kwargs) where vector : ndarray A rank 1 array already padded with zeros. Padded values are vector[:iaxis_pad_width[0]] and vector[-iaxis_pad_width[1]:]. iaxis_pad_width : tuple A 2-tuple of ints, iaxis_pad_width[0] represents the number of values padded at the beginning of vector where iaxis_pad_width[1] represents the number of values padded at the end of vector. iaxis : int The axis currently being calculated. kwargs : dict Any keyword arguments the function requires. 
Examples -------- >>> a = [1, 2, 3, 4, 5] >>> np.pad(a, (2, 3), 'constant', constant_values=(4, 6)) array([4, 4, 1, ..., 6, 6, 6]) >>> np.pad(a, (2, 3), 'edge') array([1, 1, 1, ..., 5, 5, 5]) >>> np.pad(a, (2, 3), 'linear_ramp', end_values=(5, -4)) array([ 5, 3, 1, 2, 3, 4, 5, 2, -1, -4]) >>> np.pad(a, (2,), 'maximum') array([5, 5, 1, 2, 3, 4, 5, 5, 5]) >>> np.pad(a, (2,), 'mean') array([3, 3, 1, 2, 3, 4, 5, 3, 3]) >>> np.pad(a, (2,), 'median') array([3, 3, 1, 2, 3, 4, 5, 3, 3]) >>> a = [[1, 2], [3, 4]] >>> np.pad(a, ((3, 2), (2, 3)), 'minimum') array([[1, 1, 1, 2, 1, 1, 1], [1, 1, 1, 2, 1, 1, 1], [1, 1, 1, 2, 1, 1, 1], [1, 1, 1, 2, 1, 1, 1], [3, 3, 3, 4, 3, 3, 3], [1, 1, 1, 2, 1, 1, 1], [1, 1, 1, 2, 1, 1, 1]]) >>> a = [1, 2, 3, 4, 5] >>> np.pad(a, (2, 3), 'reflect') array([3, 2, 1, 2, 3, 4, 5, 4, 3, 2]) >>> np.pad(a, (2, 3), 'reflect', reflect_type='odd') array([-1, 0, 1, 2, 3, 4, 5, 6, 7, 8]) >>> np.pad(a, (2, 3), 'symmetric') array([2, 1, 1, 2, 3, 4, 5, 5, 4, 3]) >>> np.pad(a, (2, 3), 'symmetric', reflect_type='odd') array([0, 1, 1, 2, 3, 4, 5, 5, 6, 7]) >>> np.pad(a, (2, 3), 'wrap') array([4, 5, 1, 2, 3, 4, 5, 1, 2, 3]) >>> def pad_with(vector, pad_width, iaxis, kwargs): ... pad_value = kwargs.get('padder', 10) ... vector[:pad_width[0]] = pad_value ... vector[-pad_width[1]:] = pad_value >>> a = np.arange(6) >>> a = a.reshape((2, 3)) >>> np.pad(a, 2, pad_with) array([[10, 10, 10, 10, 10, 10, 10], [10, 10, 10, 10, 10, 10, 10], [10, 10, 0, 1, 2, 10, 10], [10, 10, 3, 4, 5, 10, 10], [10, 10, 10, 10, 10, 10, 10], [10, 10, 10, 10, 10, 10, 10]]) >>> np.pad(a, 2, pad_with, padder=100) array([[100, 100, 100, 100, 100, 100, 100], [100, 100, 100, 100, 100, 100, 100], [100, 100, 0, 1, 2, 100, 100], [100, 100, 3, 4, 5, 100, 100], [100, 100, 100, 100, 100, 100, 100], [100, 100, 100, 100, 100, 100, 100]]) """ array = np.asarray(array) pad_width = np.asarray(pad_width) if not pad_width.dtype.kind == 'i': raise TypeError('`pad_width` must be of integral type.') # Broadcast to shape (array.ndim, 2) pad_width = _as_pairs(pad_width, array.ndim, as_index=True) if callable(mode): # Old behavior: Use user-supplied function with np.apply_along_axis function = mode # Create a new zero padded array padded, _ = _pad_simple(array, pad_width, fill_value=0) # And apply along each axis for axis in range(padded.ndim): # Iterate using ndindex as in apply_along_axis, but assuming that # function operates inplace on the padded array. 
# view with the iteration axis at the end view = np.moveaxis(padded, axis, -1) # compute indices for the iteration axes, and append a trailing # ellipsis to prevent 0d arrays decaying to scalars (gh-8642) inds = ndindex(view.shape[:-1]) inds = (ind + (Ellipsis,) for ind in inds) for ind in inds: function(view[ind], pad_width[axis], axis, kwargs) return padded # Make sure that no unsupported keywords were passed for the current mode allowed_kwargs = { 'empty': [], 'edge': [], 'wrap': [], 'constant': ['constant_values'], 'linear_ramp': ['end_values'], 'maximum': ['stat_length'], 'mean': ['stat_length'], 'median': ['stat_length'], 'minimum': ['stat_length'], 'reflect': ['reflect_type'], 'symmetric': ['reflect_type'], } try: unsupported_kwargs = set(kwargs) - set(allowed_kwargs[mode]) except KeyError: raise ValueError("mode '{}' is not supported".format(mode)) from None if unsupported_kwargs: raise ValueError("unsupported keyword arguments for mode '{}': {}" .format(mode, unsupported_kwargs)) stat_functions = {"maximum": np.amax, "minimum": np.amin, "mean": np.mean, "median": np.median} # Create array with final shape and original values # (padded area is undefined) padded, original_area_slice = _pad_simple(array, pad_width) # And prepare iteration over all dimensions # (zipping may be more readable than using enumerate) axes = range(padded.ndim) if mode == "constant": values = kwargs.get("constant_values", 0) values = _as_pairs(values, padded.ndim) for axis, width_pair, value_pair in zip(axes, pad_width, values): roi = _view_roi(padded, original_area_slice, axis) _set_pad_area(roi, axis, width_pair, value_pair) elif mode == "empty": pass # Do nothing as _pad_simple already returned the correct result elif array.size == 0: # Only modes "constant" and "empty" can extend empty axes, all other # modes depend on `array` not being empty # -> ensure every empty axis is only "padded with 0" for axis, width_pair in zip(axes, pad_width): if array.shape[axis] == 0 and any(width_pair): raise ValueError( "can't extend empty axis {} using modes other than " "'constant' or 'empty'".format(axis) ) # passed, don't need to do anything more as _pad_simple already # returned the correct result elif mode == "edge": for axis, width_pair in zip(axes, pad_width): roi = _view_roi(padded, original_area_slice, axis) edge_pair = _get_edges(roi, axis, width_pair) _set_pad_area(roi, axis, width_pair, edge_pair) elif mode == "linear_ramp": end_values = kwargs.get("end_values", 0) end_values = _as_pairs(end_values, padded.ndim) for axis, width_pair, value_pair in zip(axes, pad_width, end_values): roi = _view_roi(padded, original_area_slice, axis) ramp_pair = _get_linear_ramps(roi, axis, width_pair, value_pair) _set_pad_area(roi, axis, width_pair, ramp_pair) elif mode in stat_functions: func = stat_functions[mode] length = kwargs.get("stat_length", None) length = _as_pairs(length, padded.ndim, as_index=True) for axis, width_pair, length_pair in zip(axes, pad_width, length): roi = _view_roi(padded, original_area_slice, axis) stat_pair = _get_stats(roi, axis, width_pair, length_pair, func) _set_pad_area(roi, axis, width_pair, stat_pair) elif mode in {"reflect", "symmetric"}: method = kwargs.get("reflect_type", "even") include_edge = True if mode == "symmetric" else False for axis, (left_index, right_index) in zip(axes, pad_width): if array.shape[axis] == 1 and (left_index > 0 or right_index > 0): # Extending singleton dimension for 'reflect' is legacy # behavior; it really should raise an error. 
edge_pair = _get_edges(padded, axis, (left_index, right_index)) _set_pad_area( padded, axis, (left_index, right_index), edge_pair) continue roi = _view_roi(padded, original_area_slice, axis) while left_index > 0 or right_index > 0: # Iteratively pad until dimension is filled with reflected # values. This is necessary if the pad area is larger than # the length of the original values in the current dimension. left_index, right_index = _set_reflect_both( roi, axis, (left_index, right_index), method, include_edge ) elif mode == "wrap": for axis, (left_index, right_index) in zip(axes, pad_width): roi = _view_roi(padded, original_area_slice, axis) while left_index > 0 or right_index > 0: # Iteratively pad until dimension is filled with wrapped # values. This is necessary if the pad area is larger than # the length of the original values in the current dimension. left_index, right_index = _set_wrap_both( roi, axis, (left_index, right_index)) return padded
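For reference, a small standalone illustration (not part of the dataset row above) of the iterative 'wrap' padding that _set_wrap_both handles when the requested pad width exceeds the original axis length; the expected output is simply the periodic extension of the input.

import numpy as np

a = np.array([1, 2, 3])
# The pad area is larger than the axis length, so the wrap loop in pad()
# runs more than once per side before the dimension is filled.
padded = np.pad(a, (5, 2), mode='wrap')
print(padded)  # [2 3 1 2 3 1 2 3 1 2]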
np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 11) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10), np.array([])] compare_results(res, desired) def test_integer_split_2D_rows(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=0) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # Same thing for manual splits: res = array_split(a, [0, 1, 2], axis=0) tgt = [np.zeros((0, 10)), np.array([np.arange(10)]), np.array([np.arange(10)])] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) def test_integer_split_2D_cols(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=-1) desired = [np.array([np.arange(4), np.arange(4)]), np.array([np.arange(4, 7), np.arange(4, 7)]), np.array([np.arange(7, 10), np.arange(7, 10)])] compare_results(res, desired) def test_integer_split_2D_default(self): """ This will fail if we change default axis """ a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # perhaps should check higher dimensions @pytest.mark.skipif(not IS_64BIT, reason="Needs 64bit platform") def test_integer_split_2D_rows_greater_max_int32(self): a = np.broadcast_to([0], (1 << 32, 2)) res = array_split(a, 4) chunk = np.broadcast_to([0], (1 << 30, 2)) tgt = [chunk] * 4 for i in range(len(tgt)): assert_equal(res[i].shape, tgt[i].shape) def test_index_split_simple(self): a = np.arange(10) indices = [1, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.arange(0, 1), np.arange(1, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_low_bound(self): a = np.arange(10) indices = [0, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_high_bound(self): a = np.arange(10) indices = [0, 5, 7, 10, 12] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10), np.array([]), np.array([])] compare_results(res, desired) class TestSplit: # The split function is essentially the same as array_split, # except that it test if splitting will result in an # equal split. Only test for this case. 
def test_equal_split(self): a = np.arange(10) res = split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) def test_unequal_split(self): a = np.arange(10) assert_raises(ValueError, split, a, 3) class TestColumnStack: def test_non_iterable(self): assert_raises(TypeError, column_stack, 1) def test_1D_arrays(self): # example from docstring a = np.array((1, 2, 3)) b = np.array((2, 3, 4)) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_2D_arrays(self): # same as hstack 2D docstring example a = np.array([[1], [2], [3]]) b = np.array([[2], [3], [4]]) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_generator(self): with assert_warns(FutureWarning): column_stack((np.arange(3) for _ in range(2))) class TestDstack: def test_non_iterable(self): assert_raises(TypeError, dstack, 1) def test_0D_array(self): a = np.array(1) b = np.array(2) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_1D_array(self): a = np.array([1]) b = np.array([2]) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_2D_array(self): a = np.array([[1], [2]]) b = np.array([[1], [2]]) res = dstack([a, b]) desired = np.array([[[1, 1]], [[2, 2, ]]]) assert_array_equal(res, desired) def test_2D_array2(self): a = np.array([1, 2]) b = np.array([1, 2]) res = dstack([a, b]) desired = np.array([[[1, 1], [2, 2]]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): dstack((np.arange(3) for _ in range(2))) # array_split has more comprehensive test of splitting. # only do simple test on hsplit, vsplit, and dsplit class TestHsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, hsplit, 1, 1) def test_0D_array(self): a = np.array(1) try: hsplit(a, 2) assert_(0) except ValueError: pass def test_1D_array(self): a = np.array([1, 2, 3, 4]) res = hsplit(a, 2) desired = [np.array([1, 2]), np.array([3, 4])] compare_results(res, desired) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = hsplit(a, 2) desired = [np.array([[1, 2], [1, 2]]), np.array([[3, 4], [3, 4]])] compare_results(res, desired) class TestVsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, vsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, vsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) try: vsplit(a, 2) assert_(0) except ValueError: pass def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = vsplit(a, 2) desired = [np.array([[1, 2, 3, 4]]), np.array([[1, 2, 3, 4]])] compare_results(res, desired) class TestDsplit: # Only testing for integer splits. 
def test_non_iterable(self): assert_raises(ValueError, dsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, dsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) assert_raises(ValueError, dsplit, a, 2) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) try: dsplit(a, 2) assert_(0) except ValueError: pass def test_3D_array(self): a = np.array([[[1, 2, 3, 4], [1, 2, 3, 4]], [[1, 2, 3, 4], [1, 2, 3, 4]]]) res = dsplit(a, 2) desired = [np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]]), np.array([[[3, 4], [3, 4]], [[3, 4], [3, 4]]])] compare_results(res, desired) class TestSqueeze: def test_basic(self): from numpy.random import rand a = rand(20, 10, 10, 1, 1) b = rand(20, 1, 10, 1, 20) c = rand(1, 1, 20, 10) assert_array_equal(np.squeeze(a), np.reshape(a, (20, 10, 10))) assert_array_equal(np.squeeze(b), np.reshape(b, (20, 10, 20))) assert_array_equal(np.squeeze(c), np.reshape(c, (20, 10))) # Squeezing to 0-dim should still give an ndarray a = [[[1.5]]] res = np.squeeze(a) assert_equal(res, 1.5) assert_equal(res.ndim, 0) assert_equal(type(res), np.ndarray) class TestKron: def test_return_type(self): class myarray(np.ndarray): __array_priority__ = 0.0 a = np.ones([2, 2]) ma = myarray(a.shape, a.dtype, a.data) assert_equal(type(kron(a, a)), np.ndarray) assert_equal(type(kron(ma, ma)), myarray) assert_equal(type(kron(a, ma)), np.ndarray) assert_equal(type(kron(ma, a)), myarray) class TestTile: def test_basic(self): a = np.array([0, 1, 2]) b = [[1, 2], [3, 4]] assert_equal(tile(a, 2), [0, 1, 2, 0, 1, 2]) assert_equal(tile(a, (2, 2)), [[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]]) assert_equal(tile(a, (1, 2)), [[0, 1, 2, 0, 1, 2]]) assert_equal(tile(b, 2), [[1, 2, 1, 2], [3, 4, 3, 4]]) assert_equal(tile(b, (2, 1)), [[1, 2], [3, 4], [1, 2], [3, 4]]) assert_equal(tile(b, (2, 2)), [[1, 2, 1, 2], [3, 4, 3, 4], [1, 2, 1, 2], [3, 4, 3, 4]]) def test_tile_one_repetition_on_array_gh4679(self): a = np.arange(5) b = tile(a, 1) b += 2 assert_equal(a, np.arange(5)) def test_empty(self): a = np.array([[[]]]) b = np.array([[], []]) c = tile(b, 2).shape d = tile(a, (3, 2, 5)).shape assert_equal(c, (2, 0)) assert_equal(d, (3, 2, 0)) def test_kroncompare(self): from numpy.random import randint reps = [(2,), (1, 2), (2, 1), (2, 2), (2, 3, 2), (3, 2)] shape = [(3,), (2, 3), (3, 4, 3), (3, 2, 3), (4, 3, 2, 4), (2, 2)] for s in shape: b = randint(0, 10, size=s) for r in reps: a = np.ones(r, b.dtype) large = tile(b, r) klarge = kron(a, b) assert_equal(large, klarge) class TestMayShareMemory: def test_basic(self): d = np.ones((50, 60)) d2 = np.ones((30, 60, 6)) assert_(np.may_share_memory(d, d)) assert_(np.may_share_memory(d, d[::-1])) assert_(np.may_share_memory(d, d[::2])) assert_(np.may_share_memory(d, d[1:, ::-1])) assert_(not np.may_share_memory(d[::-1], d2)) assert_(not np.may_share_memory(d[::2], d2)) assert_(not np.may_share_memory(d[1:, ::-1], d2)) assert_(np.may_share_memory(d2[1:, ::-1], d2)) # Utility def compare_results(res, desired): for i in range(len(desired)): assert_array_equal(res[i], desired[i])
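A hedged editorial illustration of the round trip that TestTakeAlongAxis.test_argequivalent exercises above: indices produced by an arg-function, fed back through take_along_axis, reproduce the corresponding value-function.

import numpy as np

rng = np.random.default_rng(0)
a = rng.random((3, 4))

# Sorting indices along axis 1, applied with take_along_axis, must give
# exactly the same result as sorting the values directly.
idx = np.argsort(a, axis=1)
assert np.array_equal(np.take_along_axis(a, idx, axis=1), np.sort(a, axis=1))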
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/lib/arraypad.py
""" Pytest configuration and fixtures for the Numpy test suite. """ import os import tempfile import hypothesis import pytest import numpy from numpy.core._multiarray_tests import get_fpu_mode _old_fpu_mode = None _collect_results = {} # Use a known and persistent tmpdir for hypothesis' caches, which # can be automatically cleared by the OS or user. hypothesis.configuration.set_hypothesis_home_dir( os.path.join(tempfile.gettempdir(), ".hypothesis") ) # We register two custom profiles for Numpy - for details see # https://hypothesis.readthedocs.io/en/latest/settings.html # The first is designed for our own CI runs; the latter also # forces determinism and is designed for use via np.test() hypothesis.settings.register_profile( name="numpy-profile", deadline=None, print_blob=True, ) hypothesis.settings.register_profile( name="np.test() profile", deadline=None, print_blob=True, database=None, derandomize=True, suppress_health_check=hypothesis.HealthCheck.all(), ) # Note that the default profile is chosen based on the presence # of pytest.ini, but can be overriden by passing the # --hypothesis-profile=NAME argument to pytest. _pytest_ini = os.path.join(os.path.dirname(__file__), "..", "pytest.ini") hypothesis.settings.load_profile( "numpy-profile" if os.path.isfile(_pytest_ini) else "np.test() profile" ) def pytest_configure(config): config.addinivalue_line("markers", "valgrind_error: Tests that are known to error under valgrind.") config.addinivalue_line("markers", "leaks_references: Tests that are known to leak references.") config.addinivalue_line("markers", "slow: Tests that are very slow.") config.addinivalue_line("markers", "slow_pypy: Tests that are very slow on pypy.") def pytest_addoption(parser): parser.addoption("--available-memory", action="store", default=None, help=("Set amount of memory available for running the " "test suite. This can result to tests requiring " "especially large amounts of memory to be skipped. " "Equivalent to setting environment variable " "NPY_AVAILABLE_MEM. Default: determined" "automatically.")) def pytest_sessionstart(session): available_mem = session.config.getoption('available_memory') if available_mem is not None: os.environ['NPY_AVAILABLE_MEM'] = available_mem #FIXME when yield tests are gone. @pytest.hookimpl() def pytest_itemcollected(item): """ Check FPU precision mode was not changed during test collection. The clumsy way we do it here is mainly necessary because numpy still uses yield tests, which can execute code at test collection time. """ global _old_fpu_mode mode = get_fpu_mode() if _old_fpu_mode is None: _old_fpu_mode = mode elif mode != _old_fpu_mode: _collect_results[item] = (_old_fpu_mode, mode) _old_fpu_mode = mode @pytest.fixture(scope="function", autouse=True) def check_fpu_mode(request): """ Check FPU precision mode was not changed during the test. """ old_mode = get_fpu_mode() yield new_mode = get_fpu_mode() if old_mode != new_mode: raise AssertionError("FPU precision mode changed from {0:#x} to {1:#x}" " during the test".format(old_mode, new_mode)) collect_result = _collect_results.get(request.node) if collect_result is not None: old_mode, new_mode = collect_result raise AssertionError("FPU precision mode changed from {0:#x} to {1:#x}" " when collecting the test".format(old_mode, new_mode)) @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy @pytest.fixture(autouse=True) def env_setup(monkeypatch): monkeypatch.setenv('PYTHONHASHSEED', '0')
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/conftest.py
""" Create the numpy.core.umath namespace for backward compatibility. In v1.16 the multiarray and umath c-extension modules were merged into a single _multiarray_umath extension module. So we replicate the old namespace by importing from the extension module. """ from . import _multiarray_umath from ._multiarray_umath import * # noqa: F403 # These imports are needed for backward compatibility, # do not change them. issue gh-11862 # _ones_like is semi-public, on purpose not added to __all__ from ._multiarray_umath import _UFUNC_API, _add_newdoc_ufunc, _ones_like __all__ = [ '_UFUNC_API', 'ERR_CALL', 'ERR_DEFAULT', 'ERR_IGNORE', 'ERR_LOG', 'ERR_PRINT', 'ERR_RAISE', 'ERR_WARN', 'FLOATING_POINT_SUPPORT', 'FPE_DIVIDEBYZERO', 'FPE_INVALID', 'FPE_OVERFLOW', 'FPE_UNDERFLOW', 'NAN', 'NINF', 'NZERO', 'PINF', 'PZERO', 'SHIFT_DIVIDEBYZERO', 'SHIFT_INVALID', 'SHIFT_OVERFLOW', 'SHIFT_UNDERFLOW', 'UFUNC_BUFSIZE_DEFAULT', 'UFUNC_PYVALS_NAME', '_add_newdoc_ufunc', 'absolute', 'add', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'bitwise_and', 'bitwise_or', 'bitwise_xor', 'cbrt', 'ceil', 'conj', 'conjugate', 'copysign', 'cos', 'cosh', 'deg2rad', 'degrees', 'divide', 'divmod', 'e', 'equal', 'euler_gamma', 'exp', 'exp2', 'expm1', 'fabs', 'floor', 'floor_divide', 'float_power', 'fmax', 'fmin', 'fmod', 'frexp', 'frompyfunc', 'gcd', 'geterrobj', 'greater', 'greater_equal', 'heaviside', 'hypot', 'invert', 'isfinite', 'isinf', 'isnan', 'isnat', 'lcm', 'ldexp', 'left_shift', 'less', 'less_equal', 'log', 'log10', 'log1p', 'log2', 'logaddexp', 'logaddexp2', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'maximum', 'minimum', 'mod', 'modf', 'multiply', 'negative', 'nextafter', 'not_equal', 'pi', 'positive', 'power', 'rad2deg', 'radians', 'reciprocal', 'remainder', 'right_shift', 'rint', 'seterrobj', 'sign', 'signbit', 'sin', 'sinh', 'spacing', 'sqrt', 'square', 'subtract', 'tan', 'tanh', 'true_divide', 'trunc']
import numpy as np import functools import sys import pytest from numpy.lib.shape_base import ( apply_along_axis, apply_over_axes, array_split, split, hsplit, dsplit, vsplit, dstack, column_stack, kron, tile, expand_dims, take_along_axis, put_along_axis ) from numpy.testing import ( assert_, assert_equal, assert_array_equal, assert_raises, assert_warns ) IS_64BIT = sys.maxsize > 2**32 def _add_keepdims(func): """ hack in keepdims behavior into a function taking an axis """ @functools.wraps(func) def wrapped(a, axis, **kwargs): res = func(a, axis=axis, **kwargs) if axis is None: axis = 0 # res is now a scalar, so we can insert this anywhere return np.expand_dims(res, axis=axis) return wrapped class TestTakeAlongAxis: def test_argequivalent(self): """ Test it translates from arg<func> to <func> """ from numpy.random import rand a = rand(3, 4, 5) funcs = [ (np.sort, np.argsort, dict()), (_add_keepdims(np.min), _add_keepdims(np.argmin), dict()), (_add_keepdims(np.max), _add_keepdims(np.argmax), dict()), (np.partition, np.argpartition, dict(kth=2)), ] for func, argfunc, kwargs in funcs: for axis in list(range(a.ndim)) + [None]: a_func = func(a, axis=axis, **kwargs) ai_func = argfunc(a, axis=axis, **kwargs) assert_equal(a_func, take_along_axis(a, ai_func, axis=axis)) def test_invalid(self): """ Test it errors when indices has too few dimensions """ a = np.ones((10, 10)) ai = np.ones((10, 2), dtype=np.intp) # sanity check take_along_axis(a, ai, axis=1) # not enough indices assert_raises(ValueError, take_along_axis, a, np.array(1), axis=1) # bool arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(bool), axis=1) # float arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(float), axis=1) # invalid axis assert_raises(np.AxisError, take_along_axis, a, ai, axis=10) def test_empty(self): """ Test everything is ok with empty results, even with inserted dims """ a = np.ones((3, 4, 5)) ai = np.ones((3, 0, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, ai.shape) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.ones((1, 2, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, (3, 2, 5)) class TestPutAlongAxis: def test_replace_max(self): a_base = np.array([[10, 30, 20], [60, 40, 50]]) for axis in list(range(a_base.ndim)) + [None]: # we mutate this in the loop a = a_base.copy() # replace the max with a small value i_max = _add_keepdims(np.argmax)(a, axis=axis) put_along_axis(a, i_max, -99, axis=axis) # find the new minimum, which should max i_min = _add_keepdims(np.argmin)(a, axis=axis) assert_equal(i_min, i_max) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.arange(10, dtype=np.intp).reshape((1, 2, 5)) % 4 put_along_axis(a, ai, 20, axis=1) assert_equal(take_along_axis(a, ai, axis=1), 20) class TestApplyAlongAxis: def test_simple(self): a = np.ones((20, 10), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_simple101(self): a = np.ones((10, 101), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_3d(self): a = np.arange(27).reshape((3, 3, 3)) assert_array_equal(apply_along_axis(np.sum, 0, a), [[27, 30, 33], [36, 39, 42], [45, 48, 51]]) def test_preserve_subclass(self): def double(row): return row * 2 class MyNDArray(np.ndarray): pass m = 
np.array([[0, 1], [2, 3]]).view(MyNDArray) expected = np.array([[0, 2], [4, 6]]).view(MyNDArray) result = apply_along_axis(double, 0, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) result = apply_along_axis(double, 1, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) def test_subclass(self): class MinimalSubclass(np.ndarray): data = 1 def minimal_function(array): return array.data a = np.zeros((6, 3)).view(MinimalSubclass) assert_array_equal( apply_along_axis(minimal_function, 0, a), np.array([1, 1, 1]) ) def test_scalar_array(self, cls=np.ndarray): a = np.ones((6, 3)).view(cls) res = apply_along_axis(np.sum, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) def test_0d_array(self, cls=np.ndarray): def sum_to_0d(x): """ Sum x, returning a 0d array of the same class """ assert_equal(x.ndim, 1) return np.squeeze(np.sum(x, keepdims=True)) a = np.ones((6, 3)).view(cls) res = apply_along_axis(sum_to_0d, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) res = apply_along_axis(sum_to_0d, 1, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([3, 3, 3, 3, 3, 3]).view(cls)) def test_axis_insertion(self, cls=np.ndarray): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) return (x[::-1] * x[1:,None]).view(cls) a2d = np.arange(6*3).reshape((6, 3)) # 2d insertion along first axis actual = apply_along_axis(f1to2, 0, a2d) expected = np.stack([ f1to2(a2d[:,i]) for i in range(a2d.shape[1]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 2d insertion along last axis actual = apply_along_axis(f1to2, 1, a2d) expected = np.stack([ f1to2(a2d[i,:]) for i in range(a2d.shape[0]) ], axis=0).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 3d insertion along middle axis a3d = np.arange(6*5*3).reshape((6, 5, 3)) actual = apply_along_axis(f1to2, 1, a3d) expected = np.stack([ np.stack([ f1to2(a3d[i,:,j]) for i in range(a3d.shape[0]) ], axis=0) for j in range(a3d.shape[2]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) def test_subclass_preservation(self): class MinimalSubclass(np.ndarray): pass self.test_scalar_array(MinimalSubclass) self.test_0d_array(MinimalSubclass) self.test_axis_insertion(MinimalSubclass) def test_axis_insertion_ma(self): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) res = x[::-1] * x[1:,None] return np.ma.masked_where(res%5==0, res) a = np.arange(6*3).reshape((6, 3)) res = apply_along_axis(f1to2, 0, a) assert_(isinstance(res, np.ma.masked_array)) assert_equal(res.ndim, 3) assert_array_equal(res[:,:,0].mask, f1to2(a[:,0]).mask) assert_array_equal(res[:,:,1].mask, f1to2(a[:,1]).mask) assert_array_equal(res[:,:,2].mask, f1to2(a[:,2]).mask) def test_tuple_func1d(self): def sample_1d(x): return x[1], x[0] res = np.apply_along_axis(sample_1d, 1, np.array([[1, 2], [3, 4]])) assert_array_equal(res, np.array([[2, 1], [4, 3]])) def test_empty(self): # can't apply_along_axis when there's no chance to call the function def never_call(x): assert_(False) # should never be reached a = np.empty((0, 0)) assert_raises(ValueError, np.apply_along_axis, never_call, 0, a) assert_raises(ValueError, np.apply_along_axis, never_call, 1, a) # but it's sometimes ok with some non-zero dimensions def empty_to_1(x): assert_(len(x) == 0) return 1 a = 
np.empty((10, 0)) actual = np.apply_along_axis(empty_to_1, 1, a) assert_equal(actual, np.ones(10)) assert_raises(ValueError, np.apply_along_axis, empty_to_1, 0, a) def test_with_iterable_object(self): # from issue 5248 d = np.array([ [{1, 11}, {2, 22}, {3, 33}], [{4, 44}, {5, 55}, {6, 66}] ]) actual = np.apply_along_axis(lambda a: set.union(*a), 0, d) expected = np.array([{1, 11, 4, 44}, {2, 22, 5, 55}, {3, 33, 6, 66}]) assert_equal(actual, expected) # issue 8642 - assert_equal doesn't detect this! for i in np.ndindex(actual.shape): assert_equal(type(actual[i]), type(expected[i])) class TestApplyOverAxes: def test_simple(self): a = np.arange(24).reshape(2, 3, 4) aoa_a = apply_over_axes(np.sum, a, [0, 2]) assert_array_equal(aoa_a, np.array([[[60], [92], [124]]])) class TestExpandDims: def test_functionality(self): s = (2, 3, 4, 5) a = np.empty(s) for axis in range(-5, 4): b = expand_dims(a, axis) assert_(b.shape[axis] == 1) assert_(np.squeeze(b).shape == s) def test_axis_tuple(self): a = np.empty((3, 3, 3)) assert np.expand_dims(a, axis=(0, 1, 2)).shape == (1, 1, 1, 3, 3, 3) assert np.expand_dims(a, axis=(0, -1, -2)).shape == (1, 3, 3, 3, 1, 1) assert np.expand_dims(a, axis=(0, 3, 5)).shape == (1, 3, 3, 1, 3, 1) assert np.expand_dims(a, axis=(0, -3, -5)).shape == (1, 1, 3, 1, 3, 3) def test_axis_out_of_range(self): s = (2, 3, 4, 5) a = np.empty(s) assert_raises(np.AxisError, expand_dims, a, -6) assert_raises(np.AxisError, expand_dims, a, 5) a = np.empty((3, 3, 3)) assert_raises(np.AxisError, expand_dims, a, (0, -6)) assert_raises(np.AxisError, expand_dims, a, (0, 5)) def test_repeated_axis(self): a = np.empty((3, 3, 3)) assert_raises(ValueError, expand_dims, a, axis=(1, 1)) def test_subclasses(self): a = np.arange(10).reshape((2, 5)) a = np.ma.array(a, mask=a%3 == 0) expanded = np.expand_dims(a, axis=1) assert_(isinstance(expanded, np.ma.MaskedArray)) assert_equal(expanded.shape, (2, 1, 5)) assert_equal(expanded.mask.shape, (2, 1, 5)) class TestArraySplit: def test_integer_0_split(self): a = np.arange(10) assert_raises(ValueError, array_split, a, 0) def test_integer_split(self): a = np.arange(10) res = array_split(a, 1) desired = [np.arange(10)] compare_results(res, desired) res = array_split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) res = array_split(a, 3) desired = [np.arange(4), np.arange(4, 7), np.arange(7, 10)] compare_results(res, desired) res = array_split(a, 4) desired = [np.arange(3), np.arange(3, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 5) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 6) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 7) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 8) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 9) desired = [np.arange(2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 10) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), 
np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 11) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10), np.array([])] compare_results(res, desired) def test_integer_split_2D_rows(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=0) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # Same thing for manual splits: res = array_split(a, [0, 1, 2], axis=0) tgt = [np.zeros((0, 10)), np.array([np.arange(10)]), np.array([np.arange(10)])] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) def test_integer_split_2D_cols(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=-1) desired = [np.array([np.arange(4), np.arange(4)]), np.array([np.arange(4, 7), np.arange(4, 7)]), np.array([np.arange(7, 10), np.arange(7, 10)])] compare_results(res, desired) def test_integer_split_2D_default(self): """ This will fail if we change default axis """ a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # perhaps should check higher dimensions @pytest.mark.skipif(not IS_64BIT, reason="Needs 64bit platform") def test_integer_split_2D_rows_greater_max_int32(self): a = np.broadcast_to([0], (1 << 32, 2)) res = array_split(a, 4) chunk = np.broadcast_to([0], (1 << 30, 2)) tgt = [chunk] * 4 for i in range(len(tgt)): assert_equal(res[i].shape, tgt[i].shape) def test_index_split_simple(self): a = np.arange(10) indices = [1, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.arange(0, 1), np.arange(1, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_low_bound(self): a = np.arange(10) indices = [0, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_high_bound(self): a = np.arange(10) indices = [0, 5, 7, 10, 12] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10), np.array([]), np.array([])] compare_results(res, desired) class TestSplit: # The split function is essentially the same as array_split, # except that it test if splitting will result in an # equal split. Only test for this case. 
def test_equal_split(self): a = np.arange(10) res = split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) def test_unequal_split(self): a = np.arange(10) assert_raises(ValueError, split, a, 3) class TestColumnStack: def test_non_iterable(self): assert_raises(TypeError, column_stack, 1) def test_1D_arrays(self): # example from docstring a = np.array((1, 2, 3)) b = np.array((2, 3, 4)) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_2D_arrays(self): # same as hstack 2D docstring example a = np.array([[1], [2], [3]]) b = np.array([[2], [3], [4]]) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_generator(self): with assert_warns(FutureWarning): column_stack((np.arange(3) for _ in range(2))) class TestDstack: def test_non_iterable(self): assert_raises(TypeError, dstack, 1) def test_0D_array(self): a = np.array(1) b = np.array(2) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_1D_array(self): a = np.array([1]) b = np.array([2]) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_2D_array(self): a = np.array([[1], [2]]) b = np.array([[1], [2]]) res = dstack([a, b]) desired = np.array([[[1, 1]], [[2, 2, ]]]) assert_array_equal(res, desired) def test_2D_array2(self): a = np.array([1, 2]) b = np.array([1, 2]) res = dstack([a, b]) desired = np.array([[[1, 1], [2, 2]]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): dstack((np.arange(3) for _ in range(2))) # array_split has more comprehensive test of splitting. # only do simple test on hsplit, vsplit, and dsplit class TestHsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, hsplit, 1, 1) def test_0D_array(self): a = np.array(1) try: hsplit(a, 2) assert_(0) except ValueError: pass def test_1D_array(self): a = np.array([1, 2, 3, 4]) res = hsplit(a, 2) desired = [np.array([1, 2]), np.array([3, 4])] compare_results(res, desired) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = hsplit(a, 2) desired = [np.array([[1, 2], [1, 2]]), np.array([[3, 4], [3, 4]])] compare_results(res, desired) class TestVsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, vsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, vsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) try: vsplit(a, 2) assert_(0) except ValueError: pass def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = vsplit(a, 2) desired = [np.array([[1, 2, 3, 4]]), np.array([[1, 2, 3, 4]])] compare_results(res, desired) class TestDsplit: # Only testing for integer splits. 
def test_non_iterable(self): assert_raises(ValueError, dsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, dsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) assert_raises(ValueError, dsplit, a, 2) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) try: dsplit(a, 2) assert_(0) except ValueError: pass def test_3D_array(self): a = np.array([[[1, 2, 3, 4], [1, 2, 3, 4]], [[1, 2, 3, 4], [1, 2, 3, 4]]]) res = dsplit(a, 2) desired = [np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]]), np.array([[[3, 4], [3, 4]], [[3, 4], [3, 4]]])] compare_results(res, desired) class TestSqueeze: def test_basic(self): from numpy.random import rand a = rand(20, 10, 10, 1, 1) b = rand(20, 1, 10, 1, 20) c = rand(1, 1, 20, 10) assert_array_equal(np.squeeze(a), np.reshape(a, (20, 10, 10))) assert_array_equal(np.squeeze(b), np.reshape(b, (20, 10, 20))) assert_array_equal(np.squeeze(c), np.reshape(c, (20, 10))) # Squeezing to 0-dim should still give an ndarray a = [[[1.5]]] res = np.squeeze(a) assert_equal(res, 1.5) assert_equal(res.ndim, 0) assert_equal(type(res), np.ndarray) class TestKron: def test_return_type(self): class myarray(np.ndarray): __array_priority__ = 0.0 a = np.ones([2, 2]) ma = myarray(a.shape, a.dtype, a.data) assert_equal(type(kron(a, a)), np.ndarray) assert_equal(type(kron(ma, ma)), myarray) assert_equal(type(kron(a, ma)), np.ndarray) assert_equal(type(kron(ma, a)), myarray) class TestTile: def test_basic(self): a = np.array([0, 1, 2]) b = [[1, 2], [3, 4]] assert_equal(tile(a, 2), [0, 1, 2, 0, 1, 2]) assert_equal(tile(a, (2, 2)), [[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]]) assert_equal(tile(a, (1, 2)), [[0, 1, 2, 0, 1, 2]]) assert_equal(tile(b, 2), [[1, 2, 1, 2], [3, 4, 3, 4]]) assert_equal(tile(b, (2, 1)), [[1, 2], [3, 4], [1, 2], [3, 4]]) assert_equal(tile(b, (2, 2)), [[1, 2, 1, 2], [3, 4, 3, 4], [1, 2, 1, 2], [3, 4, 3, 4]]) def test_tile_one_repetition_on_array_gh4679(self): a = np.arange(5) b = tile(a, 1) b += 2 assert_equal(a, np.arange(5)) def test_empty(self): a = np.array([[[]]]) b = np.array([[], []]) c = tile(b, 2).shape d = tile(a, (3, 2, 5)).shape assert_equal(c, (2, 0)) assert_equal(d, (3, 2, 0)) def test_kroncompare(self): from numpy.random import randint reps = [(2,), (1, 2), (2, 1), (2, 2), (2, 3, 2), (3, 2)] shape = [(3,), (2, 3), (3, 4, 3), (3, 2, 3), (4, 3, 2, 4), (2, 2)] for s in shape: b = randint(0, 10, size=s) for r in reps: a = np.ones(r, b.dtype) large = tile(b, r) klarge = kron(a, b) assert_equal(large, klarge) class TestMayShareMemory: def test_basic(self): d = np.ones((50, 60)) d2 = np.ones((30, 60, 6)) assert_(np.may_share_memory(d, d)) assert_(np.may_share_memory(d, d[::-1])) assert_(np.may_share_memory(d, d[::2])) assert_(np.may_share_memory(d, d[1:, ::-1])) assert_(not np.may_share_memory(d[::-1], d2)) assert_(not np.may_share_memory(d[::2], d2)) assert_(not np.may_share_memory(d[1:, ::-1], d2)) assert_(np.may_share_memory(d2[1:, ::-1], d2)) # Utility def compare_results(res, desired): for i in range(len(desired)): assert_array_equal(res[i], desired[i])
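The tests above fix the contract between array_split and split; a minimal, self-contained sketch of that contract using only the public numpy API (nothing from the test module is assumed):

import numpy as np

a = np.arange(10)
# array_split tolerates an uneven division and hands back smaller tail chunks
print([c.tolist() for c in np.array_split(a, 3)])   # [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
# split insists on an exact division and raises otherwise
try:
    np.split(a, 3)
except ValueError as err:
    print("split rejected the uneven division:", err)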
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/core/umath.py
""" Array methods which are called by both the C-code for the method and the Python code for the NumPy-namespace function """ import warnings from contextlib import nullcontext from numpy.core import multiarray as mu from numpy.core import umath as um from numpy.core.multiarray import asanyarray from numpy.core import numerictypes as nt from numpy.core import _exceptions from numpy._globals import _NoValue from numpy.compat import pickle, os_fspath # save those O(100) nanoseconds! umr_maximum = um.maximum.reduce umr_minimum = um.minimum.reduce umr_sum = um.add.reduce umr_prod = um.multiply.reduce umr_any = um.logical_or.reduce umr_all = um.logical_and.reduce # Complex types to -> (2,)float view for fast-path computation in _var() _complex_to_float = { nt.dtype(nt.csingle) : nt.dtype(nt.single), nt.dtype(nt.cdouble) : nt.dtype(nt.double), } # Special case for windows: ensure double takes precedence if nt.dtype(nt.longdouble) != nt.dtype(nt.double): _complex_to_float.update({ nt.dtype(nt.clongdouble) : nt.dtype(nt.longdouble), }) # avoid keyword arguments to speed up parsing, saves about 15%-20% for very # small reductions def _amax(a, axis=None, out=None, keepdims=False, initial=_NoValue, where=True): return umr_maximum(a, axis, None, out, keepdims, initial, where) def _amin(a, axis=None, out=None, keepdims=False, initial=_NoValue, where=True): return umr_minimum(a, axis, None, out, keepdims, initial, where) def _sum(a, axis=None, dtype=None, out=None, keepdims=False, initial=_NoValue, where=True): return umr_sum(a, axis, dtype, out, keepdims, initial, where) def _prod(a, axis=None, dtype=None, out=None, keepdims=False, initial=_NoValue, where=True): return umr_prod(a, axis, dtype, out, keepdims, initial, where) def _any(a, axis=None, dtype=None, out=None, keepdims=False, *, where=True): # Parsing keyword arguments is currently fairly slow, so avoid it for now if where is True: return umr_any(a, axis, dtype, out, keepdims) return umr_any(a, axis, dtype, out, keepdims, where=where) def _all(a, axis=None, dtype=None, out=None, keepdims=False, *, where=True): # Parsing keyword arguments is currently fairly slow, so avoid it for now if where is True: return umr_all(a, axis, dtype, out, keepdims) return umr_all(a, axis, dtype, out, keepdims, where=where) def _count_reduce_items(arr, axis, keepdims=False, where=True): # fast-path for the default case if where is True: # no boolean mask given, calculate items according to axis if axis is None: axis = tuple(range(arr.ndim)) elif not isinstance(axis, tuple): axis = (axis,) items = nt.intp(1) for ax in axis: items *= arr.shape[mu.normalize_axis_index(ax, arr.ndim)] else: # TODO: Optimize case when `where` is broadcast along a non-reduction # axis and full sum is more excessive than needed. # guarded to protect circular imports from numpy.lib.stride_tricks import broadcast_to # count True values in (potentially broadcasted) boolean mask items = umr_sum(broadcast_to(where, arr.shape), axis, nt.intp, None, keepdims) return items # Numpy 1.17.0, 2019-02-24 # Various clip behavior deprecations, marked with _clip_dep as a prefix. 
def _clip_dep_is_scalar_nan(a): # guarded to protect circular imports from numpy.core.fromnumeric import ndim if ndim(a) != 0: return False try: return um.isnan(a) except TypeError: return False def _clip_dep_is_byte_swapped(a): if isinstance(a, mu.ndarray): return not a.dtype.isnative return False def _clip_dep_invoke_with_casting(ufunc, *args, out=None, casting=None, **kwargs): # normal path if casting is not None: return ufunc(*args, out=out, casting=casting, **kwargs) # try to deal with broken casting rules try: return ufunc(*args, out=out, **kwargs) except _exceptions._UFuncOutputCastingError as e: # Numpy 1.17.0, 2019-02-24 warnings.warn( "Converting the output of clip from {!r} to {!r} is deprecated. " "Pass `casting=\"unsafe\"` explicitly to silence this warning, or " "correct the type of the variables.".format(e.from_, e.to), DeprecationWarning, stacklevel=2 ) return ufunc(*args, out=out, casting="unsafe", **kwargs) def _clip(a, min=None, max=None, out=None, *, casting=None, **kwargs): if min is None and max is None: raise ValueError("One of max or min must be given") # Numpy 1.17.0, 2019-02-24 # This deprecation probably incurs a substantial slowdown for small arrays, # it will be good to get rid of it. if not _clip_dep_is_byte_swapped(a) and not _clip_dep_is_byte_swapped(out): using_deprecated_nan = False if _clip_dep_is_scalar_nan(min): min = -float('inf') using_deprecated_nan = True if _clip_dep_is_scalar_nan(max): max = float('inf') using_deprecated_nan = True if using_deprecated_nan: warnings.warn( "Passing `np.nan` to mean no clipping in np.clip has always " "been unreliable, and is now deprecated. " "In future, this will always return nan, like it already does " "when min or max are arrays that contain nan. " "To skip a bound, pass either None or an np.inf of an " "appropriate sign.", DeprecationWarning, stacklevel=2 ) if min is None: return _clip_dep_invoke_with_casting( um.minimum, a, max, out=out, casting=casting, **kwargs) elif max is None: return _clip_dep_invoke_with_casting( um.maximum, a, min, out=out, casting=casting, **kwargs) else: return _clip_dep_invoke_with_casting( um.clip, a, min, max, out=out, casting=casting, **kwargs) def _mean(a, axis=None, dtype=None, out=None, keepdims=False, *, where=True): arr = asanyarray(a) is_float16_result = False rcount = _count_reduce_items(arr, axis, keepdims=keepdims, where=where) if rcount == 0 if where is True else umr_any(rcount == 0, axis=None): warnings.warn("Mean of empty slice.", RuntimeWarning, stacklevel=2) # Cast bool, unsigned int, and int to float64 by default if dtype is None: if issubclass(arr.dtype.type, (nt.integer, nt.bool_)): dtype = mu.dtype('f8') elif issubclass(arr.dtype.type, nt.float16): dtype = mu.dtype('f4') is_float16_result = True ret = umr_sum(arr, axis, dtype, out, keepdims, where=where) if isinstance(ret, mu.ndarray): ret = um.true_divide( ret, rcount, out=ret, casting='unsafe', subok=False) if is_float16_result and out is None: ret = arr.dtype.type(ret) elif hasattr(ret, 'dtype'): if is_float16_result: ret = arr.dtype.type(ret / rcount) else: ret = ret.dtype.type(ret / rcount) else: ret = ret / rcount return ret def _var(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False, *, where=True): arr = asanyarray(a) rcount = _count_reduce_items(arr, axis, keepdims=keepdims, where=where) # Make this warning show up on top. 
if ddof >= rcount if where is True else umr_any(ddof >= rcount, axis=None): warnings.warn("Degrees of freedom <= 0 for slice", RuntimeWarning, stacklevel=2) # Cast bool, unsigned int, and int to float64 by default if dtype is None and issubclass(arr.dtype.type, (nt.integer, nt.bool_)): dtype = mu.dtype('f8') # Compute the mean. # Note that if dtype is not of inexact type then arraymean will # not be either. arrmean = umr_sum(arr, axis, dtype, keepdims=True, where=where) # The shape of rcount has to match arrmean to not change the shape of out # in broadcasting. Otherwise, it cannot be stored back to arrmean. if rcount.ndim == 0: # fast-path for default case when where is True div = rcount else: # matching rcount to arrmean when where is specified as array div = rcount.reshape(arrmean.shape) if isinstance(arrmean, mu.ndarray): arrmean = um.true_divide(arrmean, div, out=arrmean, casting='unsafe', subok=False) else: arrmean = arrmean.dtype.type(arrmean / rcount) # Compute sum of squared deviations from mean # Note that x may not be inexact and that we need it to be an array, # not a scalar. x = asanyarray(arr - arrmean) if issubclass(arr.dtype.type, (nt.floating, nt.integer)): x = um.multiply(x, x, out=x) # Fast-paths for built-in complex types elif x.dtype in _complex_to_float: xv = x.view(dtype=(_complex_to_float[x.dtype], (2,))) um.multiply(xv, xv, out=xv) x = um.add(xv[..., 0], xv[..., 1], out=x.real).real # Most general case; includes handling object arrays containing imaginary # numbers and complex types with non-native byteorder else: x = um.multiply(x, um.conjugate(x), out=x).real ret = umr_sum(x, axis, dtype, out, keepdims=keepdims, where=where) # Compute degrees of freedom and make sure it is not negative. rcount = um.maximum(rcount - ddof, 0) # divide by degrees of freedom if isinstance(ret, mu.ndarray): ret = um.true_divide( ret, rcount, out=ret, casting='unsafe', subok=False) elif hasattr(ret, 'dtype'): ret = ret.dtype.type(ret / rcount) else: ret = ret / rcount return ret def _std(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False, *, where=True): ret = _var(a, axis=axis, dtype=dtype, out=out, ddof=ddof, keepdims=keepdims, where=where) if isinstance(ret, mu.ndarray): ret = um.sqrt(ret, out=ret) elif hasattr(ret, 'dtype'): ret = ret.dtype.type(um.sqrt(ret)) else: ret = um.sqrt(ret) return ret def _ptp(a, axis=None, out=None, keepdims=False): return um.subtract( umr_maximum(a, axis, None, out, keepdims), umr_minimum(a, axis, None, None, keepdims), out ) def _dump(self, file, protocol=2): if hasattr(file, 'write'): ctx = nullcontext(file) else: ctx = open(os_fspath(file), "wb") with ctx as f: pickle.dump(self, f, protocol=protocol) def _dumps(self, protocol=2): return pickle.dumps(self, protocol=protocol)
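These private helpers back np.mean/np.var/np.std; below is a small sketch of the observable behaviour they implement (public API only: dtype upcasting and the ddof divisor), offered as an illustration rather than a restatement of the internals:

import numpy as np

a = np.array([1, 2, 3, 4], dtype=np.int32)
assert np.mean(a).dtype == np.float64          # integer input is accumulated as float64

x = np.array([1.0, 2.0, 4.0, 8.0])
manual = np.sum((x - x.mean()) ** 2) / (x.size - 1)
assert np.isclose(np.var(x, ddof=1), manual)   # sum of squared deviations / (n - ddof)

h = np.ones(3, dtype=np.float16)
assert np.mean(h).dtype == np.float16          # float16 result, accumulated in float32 per the code above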
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/core/_methods.py
""" Test machar. Given recent changes to hardcode type data, we might want to get rid of both MachAr and this test at some point. """ from numpy.core.machar import MachAr import numpy.core.numerictypes as ntypes from numpy import errstate, array class TestMachAr: def _run_machar_highprec(self): # Instantiate MachAr instance with high enough precision to cause # underflow try: hiprec = ntypes.float96 MachAr(lambda v: array(v, hiprec)) except AttributeError: # Fixme, this needs to raise a 'skip' exception. "Skipping test: no ntypes.float96 available on this platform." def test_underlow(self): # Regression test for #759: # instantiating MachAr for dtype = np.float96 raises spurious warning. with errstate(all='raise'): try: self._run_machar_highprec() except FloatingPointError as e: msg = "Caught %s exception, should not have been raised." % e raise AssertionError(msg)
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/core/tests/test_machar.py
import os import re import sys import shlex import time import subprocess from copy import copy from distutils import ccompiler from distutils.ccompiler import ( compiler_class, gen_lib_options, get_default_compiler, new_compiler, CCompiler ) from distutils.errors import ( DistutilsExecError, DistutilsModuleError, DistutilsPlatformError, CompileError, UnknownFileError ) from distutils.sysconfig import customize_compiler from distutils.version import LooseVersion from numpy.distutils import log from numpy.distutils.exec_command import ( filepath_from_subprocess_output, forward_bytes_to_stdout ) from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \ get_num_build_jobs, \ _commandline_dep_string # globals for parallel build management import threading _job_semaphore = None _global_lock = threading.Lock() _processing_files = set() def _needs_build(obj, cc_args, extra_postargs, pp_opts): """ Check if an objects needs to be rebuild based on its dependencies Parameters ---------- obj : str object file Returns ------- bool """ # defined in unixcompiler.py dep_file = obj + '.d' if not os.path.exists(dep_file): return True # dep_file is a makefile containing 'object: dependencies' # formatted like posix shell (spaces escaped, \ line continuations) # the last line contains the compiler commandline arguments as some # projects may compile an extension multiple times with different # arguments with open(dep_file, "r") as f: lines = f.readlines() cmdline =_commandline_dep_string(cc_args, extra_postargs, pp_opts) last_cmdline = lines[-1] if last_cmdline != cmdline: return True contents = ''.join(lines[:-1]) deps = [x for x in shlex.split(contents, posix=True) if x != "\n" and not x.endswith(":")] try: t_obj = os.stat(obj).st_mtime # check if any of the dependencies is newer than the object # the dependencies includes the source used to create the object for f in deps: if os.stat(f).st_mtime > t_obj: return True except OSError: # no object counts as newer (shouldn't happen if dep_file exists) return True return False def replace_method(klass, method_name, func): # Py3k does not have unbound method anymore, MethodType does not work m = lambda self, *args, **kw: func(self, *args, **kw) setattr(klass, method_name, m) ###################################################################### ## Method that subclasses may redefine. But don't call this method, ## it i private to CCompiler class and may return unexpected ## results if used elsewhere. So, you have been warned.. def CCompiler_find_executables(self): """ Does nothing here, but is called by the get_version method and can be overridden by subclasses. In particular it is redefined in the `FCompiler` class where more documentation can be found. """ pass replace_method(CCompiler, 'find_executables', CCompiler_find_executables) # Using customized CCompiler.spawn. def CCompiler_spawn(self, cmd, display=None): """ Execute a command in a sub-process. Parameters ---------- cmd : str The command to execute. display : str or sequence of str, optional The text to add to the log file kept by `numpy.distutils`. If not given, `display` is equal to `cmd`. Returns ------- None Raises ------ DistutilsExecError If the command failed, i.e. the exit status was not 0. 
""" if display is None: display = cmd if is_sequence(display): display = ' '.join(list(display)) log.info(display) try: if self.verbose: subprocess.check_output(cmd) else: subprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: o = exc.output s = exc.returncode except OSError: # OSError doesn't have the same hooks for the exception # output, but exec_command() historically would use an # empty string for EnvironmentError (base class for # OSError) o = b'' # status previously used by exec_command() for parent # of OSError s = 127 else: # use a convenience return here so that any kind of # caught exception will execute the default code after the # try / except block, which handles various exceptions return None if is_sequence(cmd): cmd = ' '.join(list(cmd)) if self.verbose: forward_bytes_to_stdout(o) if re.search(b'Too many open files', o): msg = '\nTry rerunning setup command until build succeeds.' else: msg = '' raise DistutilsExecError('Command "%s" failed with exit status %d%s' % (cmd, s, msg)) replace_method(CCompiler, 'spawn', CCompiler_spawn) def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''): """ Return the name of the object files for the given source files. Parameters ---------- source_filenames : list of str The list of paths to source files. Paths can be either relative or absolute, this is handled transparently. strip_dir : bool, optional Whether to strip the directory from the returned paths. If True, the file name prepended by `output_dir` is returned. Default is False. output_dir : str, optional If given, this path is prepended to the returned paths to the object files. Returns ------- obj_names : list of str The list of paths to the object files corresponding to the source files in `source_filenames`. """ if output_dir is None: output_dir = '' obj_names = [] for src_name in source_filenames: base, ext = os.path.splitext(os.path.normpath(src_name)) base = os.path.splitdrive(base)[1] # Chop off the drive base = base[os.path.isabs(base):] # If abs, chop off leading / if base.startswith('..'): # Resolve starting relative path components, middle ones # (if any) have been handled by os.path.normpath above. i = base.rfind('..')+2 d = base[:i] d = os.path.basename(os.path.abspath(d)) base = d + base[i:] if ext not in self.src_extensions: raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name)) if strip_dir: base = os.path.basename(base) obj_name = os.path.join(output_dir, base + self.obj_extension) obj_names.append(obj_name) return obj_names replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames) def CCompiler_compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None): """ Compile one or more source files. Please refer to the Python distutils API reference for more details. Parameters ---------- sources : list of str A list of filenames output_dir : str, optional Path to the output directory. macros : list of tuples A list of macro definitions. include_dirs : list of str, optional The directories to add to the default include file search path for this compilation only. debug : bool, optional Whether or not to output debug symbols in or alongside the object file(s). extra_preargs, extra_postargs : ? Extra pre- and post-arguments. depends : list of str, optional A list of file names that all targets depend on. 
Returns ------- objects : list of str A list of object file names, one per source file `sources`. Raises ------ CompileError If compilation fails. """ # This method is effective only with Python >=2.3 distutils. # Any changes here should be applied also to fcompiler.compile # method to support pre Python 2.3 distutils. global _job_semaphore jobs = get_num_build_jobs() # setup semaphore to not exceed number of compile jobs when parallelized at # extension level (python >= 3.5) with _global_lock: if _job_semaphore is None: _job_semaphore = threading.Semaphore(jobs) if not sources: return [] from numpy.distutils.fcompiler import (FCompiler, is_f_file, has_f90_header) if isinstance(self, FCompiler): display = [] for fc in ['f77', 'f90', 'fix']: fcomp = getattr(self, 'compiler_'+fc) if fcomp is None: continue display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp))) display = '\n'.join(display) else: ccomp = self.compiler_so display = "C compiler: %s\n" % (' '.join(ccomp),) log.info(display) macros, objects, extra_postargs, pp_opts, build = \ self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) display = "compile options: '%s'" % (' '.join(cc_args)) if extra_postargs: display += "\nextra options: '%s'" % (' '.join(extra_postargs)) log.info(display) def single_compile(args): obj, (src, ext) = args if not _needs_build(obj, cc_args, extra_postargs, pp_opts): return # check if we are currently already processing the same object # happens when using the same source in multiple extensions while True: # need explicit lock as there is no atomic check and add with GIL with _global_lock: # file not being worked on, start working if obj not in _processing_files: _processing_files.add(obj) break # wait for the processing to end time.sleep(0.1) try: # retrieve slot from our #job semaphore and build with _job_semaphore: self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) finally: # register being done processing with _global_lock: _processing_files.remove(obj) if isinstance(self, FCompiler): objects_to_build = list(build.keys()) f77_objects, other_objects = [], [] for obj in objects: if obj in objects_to_build: src, ext = build[obj] if self.compiler_type=='absoft': obj = cyg2win32(obj) src = cyg2win32(src) if is_f_file(src) and not has_f90_header(src): f77_objects.append((obj, (src, ext))) else: other_objects.append((obj, (src, ext))) # f77 objects can be built in parallel build_items = f77_objects # build f90 modules serial, module files are generated during # compilation and may be used by files later in the list so the # ordering is important for o in other_objects: single_compile(o) else: build_items = build.items() if len(build) > 1 and jobs > 1: # build parallel import multiprocessing.pool pool = multiprocessing.pool.ThreadPool(jobs) pool.map(single_compile, build_items) pool.close() else: # build serial for o in build_items: single_compile(o) # Return *all* object filenames, not just the ones we just built. return objects replace_method(CCompiler, 'compile', CCompiler_compile) def CCompiler_customize_cmd(self, cmd, ignore=()): """ Customize compiler using distutils command. Parameters ---------- cmd : class instance An instance inheriting from `distutils.cmd.Command`. ignore : sequence of str, optional List of `CCompiler` commands (without ``'set_'``) that should not be altered. 
Strings that are checked for are: ``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs', 'rpath', 'link_objects')``. Returns ------- None """ log.info('customize %s using %s' % (self.__class__.__name__, cmd.__class__.__name__)) if hasattr(self, 'compiler') and 'clang' in self.compiler[0]: # clang defaults to a non-strict floating error point model. # Since NumPy and most Python libs give warnings for these, override: self.compiler.append('-ffp-exception-behavior=strict') def allow(attr): return getattr(cmd, attr, None) is not None and attr not in ignore if allow('include_dirs'): self.set_include_dirs(cmd.include_dirs) if allow('define'): for (name, value) in cmd.define: self.define_macro(name, value) if allow('undef'): for macro in cmd.undef: self.undefine_macro(macro) if allow('libraries'): self.set_libraries(self.libraries + cmd.libraries) if allow('library_dirs'): self.set_library_dirs(self.library_dirs + cmd.library_dirs) if allow('rpath'): self.set_runtime_library_dirs(cmd.rpath) if allow('link_objects'): self.set_link_objects(cmd.link_objects) replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd) def _compiler_to_string(compiler): props = [] mx = 0 keys = list(compiler.executables.keys()) for key in ['version', 'libraries', 'library_dirs', 'object_switch', 'compile_switch', 'include_dirs', 'define', 'undef', 'rpath', 'link_objects']: if key not in keys: keys.append(key) for key in keys: if hasattr(compiler, key): v = getattr(compiler, key) mx = max(mx, len(key)) props.append((key, repr(v))) fmt = '%-' + repr(mx+1) + 's = %s' lines = [fmt % prop for prop in props] return '\n'.join(lines) def CCompiler_show_customization(self): """ Print the compiler customizations to stdout. Parameters ---------- None Returns ------- None Notes ----- Printing is only done if the distutils log threshold is < 2. """ try: self.get_version() except Exception: pass if log._global_log.threshold<2: print('*'*80) print(self.__class__) print(_compiler_to_string(self)) print('*'*80) replace_method(CCompiler, 'show_customization', CCompiler_show_customization) def CCompiler_customize(self, dist, need_cxx=0): """ Do any platform-specific customization of a compiler instance. This method calls `distutils.sysconfig.customize_compiler` for platform-specific customization, as well as optionally remove a flag to suppress spurious warnings in case C++ code is being compiled. Parameters ---------- dist : object This parameter is not used for anything. need_cxx : bool, optional Whether or not C++ has to be compiled. If so (True), the ``"-Wstrict-prototypes"`` option is removed to prevent spurious warnings. Default is False. Returns ------- None Notes ----- All the default options used by distutils can be extracted with:: from distutils import sysconfig sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS', 'CCSHARED', 'LDSHARED', 'SO') """ # See FCompiler.customize for suggested usage. log.info('customize %s' % (self.__class__.__name__)) customize_compiler(self) if need_cxx: # In general, distutils uses -Wstrict-prototypes, but this option is # not valid for C++ code, only for C. Remove it if it's there to # avoid a spurious warning on every compilation. 
try: self.compiler_so.remove('-Wstrict-prototypes') except (AttributeError, ValueError): pass if hasattr(self, 'compiler') and 'cc' in self.compiler[0]: if not self.compiler_cxx: if self.compiler[0].startswith('gcc'): a, b = 'gcc', 'g++' else: a, b = 'cc', 'c++' self.compiler_cxx = [self.compiler[0].replace(a, b)]\ + self.compiler[1:] else: if hasattr(self, 'compiler'): log.warn("#### %s #######" % (self.compiler,)) if not hasattr(self, 'compiler_cxx'): log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__) # check if compiler supports gcc style automatic dependencies # run on every extension so skip for known good compilers if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or 'g++' in self.compiler[0] or 'clang' in self.compiler[0]): self._auto_depends = True elif os.name == 'posix': import tempfile import shutil tmpdir = tempfile.mkdtemp() try: fn = os.path.join(tmpdir, "file.c") with open(fn, "w") as f: f.write("int a;\n") self.compile([fn], output_dir=tmpdir, extra_preargs=['-MMD', '-MF', fn + '.d']) self._auto_depends = True except CompileError: self._auto_depends = False finally: shutil.rmtree(tmpdir) return replace_method(CCompiler, 'customize', CCompiler_customize) def simple_version_match(pat=r'[-.\d]+', ignore='', start=''): """ Simple matching of version numbers, for use in CCompiler and FCompiler. Parameters ---------- pat : str, optional A regular expression matching version numbers. Default is ``r'[-.\\d]+'``. ignore : str, optional A regular expression matching patterns to skip. Default is ``''``, in which case nothing is skipped. start : str, optional A regular expression matching the start of where to start looking for version numbers. Default is ``''``, in which case searching is started at the beginning of the version string given to `matcher`. Returns ------- matcher : callable A function that is appropriate to use as the ``.version_match`` attribute of a `CCompiler` class. `matcher` takes a single parameter, a version string. """ def matcher(self, version_string): # version string may appear in the second line, so getting rid # of new lines: version_string = version_string.replace('\n', ' ') pos = 0 if start: m = re.match(start, version_string) if not m: return None pos = m.end() while True: m = re.search(pat, version_string[pos:]) if not m: return None if ignore and re.match(ignore, m.group(0)): pos = m.end() continue break return m.group(0) return matcher def CCompiler_get_version(self, force=False, ok_status=[0]): """ Return compiler version, or None if compiler is not available. Parameters ---------- force : bool, optional If True, force a new determination of the version, even if the compiler already has a version attribute. Default is False. ok_status : list of int, optional The list of status values returned by the version look-up process for which a version string is returned. If the status value is not in `ok_status`, None is returned. Default is ``[0]``. Returns ------- version : str or None Version string, in the format of `distutils.version.LooseVersion`. 
""" if not force and hasattr(self, 'version'): return self.version self.find_executables() try: version_cmd = self.version_cmd except AttributeError: return None if not version_cmd or not version_cmd[0]: return None try: matcher = self.version_match except AttributeError: try: pat = self.version_pattern except AttributeError: return None def matcher(version_string): m = re.match(pat, version_string) if not m: return None version = m.group('version') return version try: output = subprocess.check_output(version_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: output = exc.output status = exc.returncode except OSError: # match the historical returns for a parent # exception class caught by exec_command() status = 127 output = b'' else: # output isn't actually a filepath but we do this # for now to match previous distutils behavior output = filepath_from_subprocess_output(output) status = 0 version = None if status in ok_status: version = matcher(output) if version: version = LooseVersion(version) self.version = version return version replace_method(CCompiler, 'get_version', CCompiler_get_version) def CCompiler_cxx_compiler(self): """ Return the C++ compiler. Parameters ---------- None Returns ------- cxx : class instance The C++ compiler, as a `CCompiler` instance. """ if self.compiler_type in ('msvc', 'intelw', 'intelemw'): return self cxx = copy(self) cxx.compiler_so = [cxx.compiler_cxx[0]] + cxx.compiler_so[1:] if sys.platform.startswith('aix') and 'ld_so_aix' in cxx.linker_so[0]: # AIX needs the ld_so_aix script included with Python cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \ + cxx.linker_so[2:] else: cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:] return cxx replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler) compiler_class['intel'] = ('intelccompiler', 'IntelCCompiler', "Intel C Compiler for 32-bit applications") compiler_class['intele'] = ('intelccompiler', 'IntelItaniumCCompiler', "Intel C Itanium Compiler for Itanium-based applications") compiler_class['intelem'] = ('intelccompiler', 'IntelEM64TCCompiler', "Intel C Compiler for 64-bit applications") compiler_class['intelw'] = ('intelccompiler', 'IntelCCompilerW', "Intel C Compiler for 32-bit applications on Windows") compiler_class['intelemw'] = ('intelccompiler', 'IntelEM64TCCompilerW', "Intel C Compiler for 64-bit applications on Windows") compiler_class['pathcc'] = ('pathccompiler', 'PathScaleCCompiler', "PathScale Compiler for SiCortex-based applications") ccompiler._default_compilers += (('linux.*', 'intel'), ('linux.*', 'intele'), ('linux.*', 'intelem'), ('linux.*', 'pathcc'), ('nt', 'intelw'), ('nt', 'intelemw')) if sys.platform == 'win32': compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler', "Mingw32 port of GNU C Compiler for Win32"\ "(for MSC built Python)") if mingw32(): # On windows platforms, we want to default to mingw32 (gcc) # because msvc can't build blitz stuff. log.info('Setting mingw32 as default compiler for nt.') ccompiler._default_compilers = (('nt', 'mingw32'),) \ + ccompiler._default_compilers _distutils_new_compiler = new_compiler def new_compiler (plat=None, compiler=None, verbose=None, dry_run=0, force=0): # Try first C compilers from numpy.distutils. 
if verbose is None: verbose = log.get_threshold() <= log.INFO if plat is None: plat = os.name try: if compiler is None: compiler = get_default_compiler(plat) (module_name, class_name, long_description) = compiler_class[compiler] except KeyError: msg = "don't know how to compile C/C++ code on platform '%s'" % plat if compiler is not None: msg = msg + " with '%s' compiler" % compiler raise DistutilsPlatformError(msg) module_name = "numpy.distutils." + module_name try: __import__ (module_name) except ImportError as e: msg = str(e) log.info('%s in numpy.distutils; trying from distutils', str(msg)) module_name = module_name[6:] try: __import__(module_name) except ImportError as e: msg = str(e) raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \ module_name) try: module = sys.modules[module_name] klass = vars(module)[class_name] except KeyError: raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " + "in module '%s'") % (class_name, module_name)) compiler = klass(None, dry_run, force) compiler.verbose = verbose log.debug('new_compiler returns %s' % (klass)) return compiler ccompiler.new_compiler = new_compiler _distutils_gen_lib_options = gen_lib_options def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): # the version of this function provided by CPython allows the following # to return lists, which are unpacked automatically: # - compiler.runtime_library_dir_option # our version extends the behavior to: # - compiler.library_dir_option # - compiler.library_option # - compiler.find_library_file r = _distutils_gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries) lib_opts = [] for i in r: if is_sequence(i): lib_opts.extend(list(i)) else: lib_opts.append(i) return lib_opts ccompiler.gen_lib_options = gen_lib_options # Also fix up the various compiler modules, which do # from distutils.ccompiler import gen_lib_options # Don't bother with mwerks, as we don't support Classic Mac. for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']: _m = sys.modules.get('distutils.' + _cc + 'compiler') if _m is not None: setattr(_m, 'gen_lib_options', gen_lib_options)
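# ---------------------------------------------------------------------------
# Illustration (not part of numpy.distutils): a minimal, self-contained sketch
# of the gcc-style dependency-file check performed by _needs_build() above.
# It only covers the mtime comparison; the real helper additionally compares
# the compiler command line recorded on the last line of the ".d" file.
# check_needs_rebuild is a hypothetical name used just for this example.
import os
import shlex


def check_needs_rebuild(obj_path):
    """Return True if obj_path should be recompiled, based on the
    makefile-style dependency file written next to it (obj_path + '.d')."""
    dep_file = obj_path + '.d'
    if not os.path.exists(dep_file):
        return True                     # no dependency info -> rebuild
    with open(dep_file) as f:
        contents = f.read()
    # Tokens look like "example.o: example.c example.h" with backslash line
    # continuations; drop the "target:" token, keep only the dependencies.
    deps = [tok for tok in shlex.split(contents, posix=True)
            if tok != '\n' and not tok.endswith(':')]
    try:
        obj_mtime = os.stat(obj_path).st_mtime
        # Rebuild if any dependency (including the source) is newer.
        return any(os.stat(dep).st_mtime > obj_mtime for dep in deps)
    except OSError:
        return True                     # a missing dependency counts as newer


# Hypothetical usage:
#     if check_needs_rebuild('build/temp/example.o'):
#         ...recompile example.c...
# ---------------------------------------------------------------------------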
import numpy as np import functools import sys import pytest from numpy.lib.shape_base import ( apply_along_axis, apply_over_axes, array_split, split, hsplit, dsplit, vsplit, dstack, column_stack, kron, tile, expand_dims, take_along_axis, put_along_axis ) from numpy.testing import ( assert_, assert_equal, assert_array_equal, assert_raises, assert_warns ) IS_64BIT = sys.maxsize > 2**32 def _add_keepdims(func): """ hack in keepdims behavior into a function taking an axis """ @functools.wraps(func) def wrapped(a, axis, **kwargs): res = func(a, axis=axis, **kwargs) if axis is None: axis = 0 # res is now a scalar, so we can insert this anywhere return np.expand_dims(res, axis=axis) return wrapped class TestTakeAlongAxis: def test_argequivalent(self): """ Test it translates from arg<func> to <func> """ from numpy.random import rand a = rand(3, 4, 5) funcs = [ (np.sort, np.argsort, dict()), (_add_keepdims(np.min), _add_keepdims(np.argmin), dict()), (_add_keepdims(np.max), _add_keepdims(np.argmax), dict()), (np.partition, np.argpartition, dict(kth=2)), ] for func, argfunc, kwargs in funcs: for axis in list(range(a.ndim)) + [None]: a_func = func(a, axis=axis, **kwargs) ai_func = argfunc(a, axis=axis, **kwargs) assert_equal(a_func, take_along_axis(a, ai_func, axis=axis)) def test_invalid(self): """ Test it errors when indices has too few dimensions """ a = np.ones((10, 10)) ai = np.ones((10, 2), dtype=np.intp) # sanity check take_along_axis(a, ai, axis=1) # not enough indices assert_raises(ValueError, take_along_axis, a, np.array(1), axis=1) # bool arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(bool), axis=1) # float arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(float), axis=1) # invalid axis assert_raises(np.AxisError, take_along_axis, a, ai, axis=10) def test_empty(self): """ Test everything is ok with empty results, even with inserted dims """ a = np.ones((3, 4, 5)) ai = np.ones((3, 0, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, ai.shape) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.ones((1, 2, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, (3, 2, 5)) class TestPutAlongAxis: def test_replace_max(self): a_base = np.array([[10, 30, 20], [60, 40, 50]]) for axis in list(range(a_base.ndim)) + [None]: # we mutate this in the loop a = a_base.copy() # replace the max with a small value i_max = _add_keepdims(np.argmax)(a, axis=axis) put_along_axis(a, i_max, -99, axis=axis) # find the new minimum, which should max i_min = _add_keepdims(np.argmin)(a, axis=axis) assert_equal(i_min, i_max) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.arange(10, dtype=np.intp).reshape((1, 2, 5)) % 4 put_along_axis(a, ai, 20, axis=1) assert_equal(take_along_axis(a, ai, axis=1), 20) class TestApplyAlongAxis: def test_simple(self): a = np.ones((20, 10), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_simple101(self): a = np.ones((10, 101), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_3d(self): a = np.arange(27).reshape((3, 3, 3)) assert_array_equal(apply_along_axis(np.sum, 0, a), [[27, 30, 33], [36, 39, 42], [45, 48, 51]]) def test_preserve_subclass(self): def double(row): return row * 2 class MyNDArray(np.ndarray): pass m = 
np.array([[0, 1], [2, 3]]).view(MyNDArray) expected = np.array([[0, 2], [4, 6]]).view(MyNDArray) result = apply_along_axis(double, 0, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) result = apply_along_axis(double, 1, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) def test_subclass(self): class MinimalSubclass(np.ndarray): data = 1 def minimal_function(array): return array.data a = np.zeros((6, 3)).view(MinimalSubclass) assert_array_equal( apply_along_axis(minimal_function, 0, a), np.array([1, 1, 1]) ) def test_scalar_array(self, cls=np.ndarray): a = np.ones((6, 3)).view(cls) res = apply_along_axis(np.sum, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) def test_0d_array(self, cls=np.ndarray): def sum_to_0d(x): """ Sum x, returning a 0d array of the same class """ assert_equal(x.ndim, 1) return np.squeeze(np.sum(x, keepdims=True)) a = np.ones((6, 3)).view(cls) res = apply_along_axis(sum_to_0d, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) res = apply_along_axis(sum_to_0d, 1, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([3, 3, 3, 3, 3, 3]).view(cls)) def test_axis_insertion(self, cls=np.ndarray): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) return (x[::-1] * x[1:,None]).view(cls) a2d = np.arange(6*3).reshape((6, 3)) # 2d insertion along first axis actual = apply_along_axis(f1to2, 0, a2d) expected = np.stack([ f1to2(a2d[:,i]) for i in range(a2d.shape[1]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 2d insertion along last axis actual = apply_along_axis(f1to2, 1, a2d) expected = np.stack([ f1to2(a2d[i,:]) for i in range(a2d.shape[0]) ], axis=0).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 3d insertion along middle axis a3d = np.arange(6*5*3).reshape((6, 5, 3)) actual = apply_along_axis(f1to2, 1, a3d) expected = np.stack([ np.stack([ f1to2(a3d[i,:,j]) for i in range(a3d.shape[0]) ], axis=0) for j in range(a3d.shape[2]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) def test_subclass_preservation(self): class MinimalSubclass(np.ndarray): pass self.test_scalar_array(MinimalSubclass) self.test_0d_array(MinimalSubclass) self.test_axis_insertion(MinimalSubclass) def test_axis_insertion_ma(self): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) res = x[::-1] * x[1:,None] return np.ma.masked_where(res%5==0, res) a = np.arange(6*3).reshape((6, 3)) res = apply_along_axis(f1to2, 0, a) assert_(isinstance(res, np.ma.masked_array)) assert_equal(res.ndim, 3) assert_array_equal(res[:,:,0].mask, f1to2(a[:,0]).mask) assert_array_equal(res[:,:,1].mask, f1to2(a[:,1]).mask) assert_array_equal(res[:,:,2].mask, f1to2(a[:,2]).mask) def test_tuple_func1d(self): def sample_1d(x): return x[1], x[0] res = np.apply_along_axis(sample_1d, 1, np.array([[1, 2], [3, 4]])) assert_array_equal(res, np.array([[2, 1], [4, 3]])) def test_empty(self): # can't apply_along_axis when there's no chance to call the function def never_call(x): assert_(False) # should never be reached a = np.empty((0, 0)) assert_raises(ValueError, np.apply_along_axis, never_call, 0, a) assert_raises(ValueError, np.apply_along_axis, never_call, 1, a) # but it's sometimes ok with some non-zero dimensions def empty_to_1(x): assert_(len(x) == 0) return 1 a = 
np.empty((10, 0)) actual = np.apply_along_axis(empty_to_1, 1, a) assert_equal(actual, np.ones(10)) assert_raises(ValueError, np.apply_along_axis, empty_to_1, 0, a) def test_with_iterable_object(self): # from issue 5248 d = np.array([ [{1, 11}, {2, 22}, {3, 33}], [{4, 44}, {5, 55}, {6, 66}] ]) actual = np.apply_along_axis(lambda a: set.union(*a), 0, d) expected = np.array([{1, 11, 4, 44}, {2, 22, 5, 55}, {3, 33, 6, 66}]) assert_equal(actual, expected) # issue 8642 - assert_equal doesn't detect this! for i in np.ndindex(actual.shape): assert_equal(type(actual[i]), type(expected[i])) class TestApplyOverAxes: def test_simple(self): a = np.arange(24).reshape(2, 3, 4) aoa_a = apply_over_axes(np.sum, a, [0, 2]) assert_array_equal(aoa_a, np.array([[[60], [92], [124]]])) class TestExpandDims: def test_functionality(self): s = (2, 3, 4, 5) a = np.empty(s) for axis in range(-5, 4): b = expand_dims(a, axis) assert_(b.shape[axis] == 1) assert_(np.squeeze(b).shape == s) def test_axis_tuple(self): a = np.empty((3, 3, 3)) assert np.expand_dims(a, axis=(0, 1, 2)).shape == (1, 1, 1, 3, 3, 3) assert np.expand_dims(a, axis=(0, -1, -2)).shape == (1, 3, 3, 3, 1, 1) assert np.expand_dims(a, axis=(0, 3, 5)).shape == (1, 3, 3, 1, 3, 1) assert np.expand_dims(a, axis=(0, -3, -5)).shape == (1, 1, 3, 1, 3, 3) def test_axis_out_of_range(self): s = (2, 3, 4, 5) a = np.empty(s) assert_raises(np.AxisError, expand_dims, a, -6) assert_raises(np.AxisError, expand_dims, a, 5) a = np.empty((3, 3, 3)) assert_raises(np.AxisError, expand_dims, a, (0, -6)) assert_raises(np.AxisError, expand_dims, a, (0, 5)) def test_repeated_axis(self): a = np.empty((3, 3, 3)) assert_raises(ValueError, expand_dims, a, axis=(1, 1)) def test_subclasses(self): a = np.arange(10).reshape((2, 5)) a = np.ma.array(a, mask=a%3 == 0) expanded = np.expand_dims(a, axis=1) assert_(isinstance(expanded, np.ma.MaskedArray)) assert_equal(expanded.shape, (2, 1, 5)) assert_equal(expanded.mask.shape, (2, 1, 5)) class TestArraySplit: def test_integer_0_split(self): a = np.arange(10) assert_raises(ValueError, array_split, a, 0) def test_integer_split(self): a = np.arange(10) res = array_split(a, 1) desired = [np.arange(10)] compare_results(res, desired) res = array_split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) res = array_split(a, 3) desired = [np.arange(4), np.arange(4, 7), np.arange(7, 10)] compare_results(res, desired) res = array_split(a, 4) desired = [np.arange(3), np.arange(3, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 5) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 6) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 7) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 8) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 9) desired = [np.arange(2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 10) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), 
np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 11) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10), np.array([])] compare_results(res, desired) def test_integer_split_2D_rows(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=0) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # Same thing for manual splits: res = array_split(a, [0, 1, 2], axis=0) tgt = [np.zeros((0, 10)), np.array([np.arange(10)]), np.array([np.arange(10)])] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) def test_integer_split_2D_cols(self): a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3, axis=-1) desired = [np.array([np.arange(4), np.arange(4)]), np.array([np.arange(4, 7), np.arange(4, 7)]), np.array([np.arange(7, 10), np.arange(7, 10)])] compare_results(res, desired) def test_integer_split_2D_default(self): """ This will fail if we change default axis """ a = np.array([np.arange(10), np.arange(10)]) res = array_split(a, 3) tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]), np.zeros((0, 10))] compare_results(res, tgt) assert_(a.dtype.type is res[-1].dtype.type) # perhaps should check higher dimensions @pytest.mark.skipif(not IS_64BIT, reason="Needs 64bit platform") def test_integer_split_2D_rows_greater_max_int32(self): a = np.broadcast_to([0], (1 << 32, 2)) res = array_split(a, 4) chunk = np.broadcast_to([0], (1 << 30, 2)) tgt = [chunk] * 4 for i in range(len(tgt)): assert_equal(res[i].shape, tgt[i].shape) def test_index_split_simple(self): a = np.arange(10) indices = [1, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.arange(0, 1), np.arange(1, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_low_bound(self): a = np.arange(10) indices = [0, 5, 7] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10)] compare_results(res, desired) def test_index_split_high_bound(self): a = np.arange(10) indices = [0, 5, 7, 10, 12] res = array_split(a, indices, axis=-1) desired = [np.array([]), np.arange(0, 5), np.arange(5, 7), np.arange(7, 10), np.array([]), np.array([])] compare_results(res, desired) class TestSplit: # The split function is essentially the same as array_split, # except that it test if splitting will result in an # equal split. Only test for this case. 
def test_equal_split(self): a = np.arange(10) res = split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) def test_unequal_split(self): a = np.arange(10) assert_raises(ValueError, split, a, 3) class TestColumnStack: def test_non_iterable(self): assert_raises(TypeError, column_stack, 1) def test_1D_arrays(self): # example from docstring a = np.array((1, 2, 3)) b = np.array((2, 3, 4)) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_2D_arrays(self): # same as hstack 2D docstring example a = np.array([[1], [2], [3]]) b = np.array([[2], [3], [4]]) expected = np.array([[1, 2], [2, 3], [3, 4]]) actual = np.column_stack((a, b)) assert_equal(actual, expected) def test_generator(self): with assert_warns(FutureWarning): column_stack((np.arange(3) for _ in range(2))) class TestDstack: def test_non_iterable(self): assert_raises(TypeError, dstack, 1) def test_0D_array(self): a = np.array(1) b = np.array(2) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_1D_array(self): a = np.array([1]) b = np.array([2]) res = dstack([a, b]) desired = np.array([[[1, 2]]]) assert_array_equal(res, desired) def test_2D_array(self): a = np.array([[1], [2]]) b = np.array([[1], [2]]) res = dstack([a, b]) desired = np.array([[[1, 1]], [[2, 2, ]]]) assert_array_equal(res, desired) def test_2D_array2(self): a = np.array([1, 2]) b = np.array([1, 2]) res = dstack([a, b]) desired = np.array([[[1, 1], [2, 2]]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): dstack((np.arange(3) for _ in range(2))) # array_split has more comprehensive test of splitting. # only do simple test on hsplit, vsplit, and dsplit class TestHsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, hsplit, 1, 1) def test_0D_array(self): a = np.array(1) try: hsplit(a, 2) assert_(0) except ValueError: pass def test_1D_array(self): a = np.array([1, 2, 3, 4]) res = hsplit(a, 2) desired = [np.array([1, 2]), np.array([3, 4])] compare_results(res, desired) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = hsplit(a, 2) desired = [np.array([[1, 2], [1, 2]]), np.array([[3, 4], [3, 4]])] compare_results(res, desired) class TestVsplit: """Only testing for integer splits. """ def test_non_iterable(self): assert_raises(ValueError, vsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, vsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) try: vsplit(a, 2) assert_(0) except ValueError: pass def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) res = vsplit(a, 2) desired = [np.array([[1, 2, 3, 4]]), np.array([[1, 2, 3, 4]])] compare_results(res, desired) class TestDsplit: # Only testing for integer splits. 
def test_non_iterable(self): assert_raises(ValueError, dsplit, 1, 1) def test_0D_array(self): a = np.array(1) assert_raises(ValueError, dsplit, a, 2) def test_1D_array(self): a = np.array([1, 2, 3, 4]) assert_raises(ValueError, dsplit, a, 2) def test_2D_array(self): a = np.array([[1, 2, 3, 4], [1, 2, 3, 4]]) try: dsplit(a, 2) assert_(0) except ValueError: pass def test_3D_array(self): a = np.array([[[1, 2, 3, 4], [1, 2, 3, 4]], [[1, 2, 3, 4], [1, 2, 3, 4]]]) res = dsplit(a, 2) desired = [np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]]), np.array([[[3, 4], [3, 4]], [[3, 4], [3, 4]]])] compare_results(res, desired) class TestSqueeze: def test_basic(self): from numpy.random import rand a = rand(20, 10, 10, 1, 1) b = rand(20, 1, 10, 1, 20) c = rand(1, 1, 20, 10) assert_array_equal(np.squeeze(a), np.reshape(a, (20, 10, 10))) assert_array_equal(np.squeeze(b), np.reshape(b, (20, 10, 20))) assert_array_equal(np.squeeze(c), np.reshape(c, (20, 10))) # Squeezing to 0-dim should still give an ndarray a = [[[1.5]]] res = np.squeeze(a) assert_equal(res, 1.5) assert_equal(res.ndim, 0) assert_equal(type(res), np.ndarray) class TestKron: def test_return_type(self): class myarray(np.ndarray): __array_priority__ = 0.0 a = np.ones([2, 2]) ma = myarray(a.shape, a.dtype, a.data) assert_equal(type(kron(a, a)), np.ndarray) assert_equal(type(kron(ma, ma)), myarray) assert_equal(type(kron(a, ma)), np.ndarray) assert_equal(type(kron(ma, a)), myarray) class TestTile: def test_basic(self): a = np.array([0, 1, 2]) b = [[1, 2], [3, 4]] assert_equal(tile(a, 2), [0, 1, 2, 0, 1, 2]) assert_equal(tile(a, (2, 2)), [[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]]) assert_equal(tile(a, (1, 2)), [[0, 1, 2, 0, 1, 2]]) assert_equal(tile(b, 2), [[1, 2, 1, 2], [3, 4, 3, 4]]) assert_equal(tile(b, (2, 1)), [[1, 2], [3, 4], [1, 2], [3, 4]]) assert_equal(tile(b, (2, 2)), [[1, 2, 1, 2], [3, 4, 3, 4], [1, 2, 1, 2], [3, 4, 3, 4]]) def test_tile_one_repetition_on_array_gh4679(self): a = np.arange(5) b = tile(a, 1) b += 2 assert_equal(a, np.arange(5)) def test_empty(self): a = np.array([[[]]]) b = np.array([[], []]) c = tile(b, 2).shape d = tile(a, (3, 2, 5)).shape assert_equal(c, (2, 0)) assert_equal(d, (3, 2, 0)) def test_kroncompare(self): from numpy.random import randint reps = [(2,), (1, 2), (2, 1), (2, 2), (2, 3, 2), (3, 2)] shape = [(3,), (2, 3), (3, 4, 3), (3, 2, 3), (4, 3, 2, 4), (2, 2)] for s in shape: b = randint(0, 10, size=s) for r in reps: a = np.ones(r, b.dtype) large = tile(b, r) klarge = kron(a, b) assert_equal(large, klarge) class TestMayShareMemory: def test_basic(self): d = np.ones((50, 60)) d2 = np.ones((30, 60, 6)) assert_(np.may_share_memory(d, d)) assert_(np.may_share_memory(d, d[::-1])) assert_(np.may_share_memory(d, d[::2])) assert_(np.may_share_memory(d, d[1:, ::-1])) assert_(not np.may_share_memory(d[::-1], d2)) assert_(not np.may_share_memory(d[::2], d2)) assert_(not np.may_share_memory(d[1:, ::-1], d2)) assert_(np.may_share_memory(d2[1:, ::-1], d2)) # Utility def compare_results(res, desired): for i in range(len(desired)): assert_array_equal(res[i], desired[i])
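# ---------------------------------------------------------------------------
# Illustration (not part of the test suite): a small sketch of the
# arg<func> -> <func> correspondence exercised by
# TestTakeAlongAxis.test_argequivalent above, shown for sort/argsort along
# axis 1, plus the write-side counterpart used in TestPutAlongAxis.
import numpy as np

a = np.array([[30, 10, 20],
              [ 5, 50, 40]])

idx = np.argsort(a, axis=1)                      # indices that sort each row
assert np.array_equal(np.take_along_axis(a, idx, axis=1), np.sort(a, axis=1))

# put_along_axis writes through the same kind of index array: overwrite each
# row's maximum in place, as in TestPutAlongAxis.test_replace_max.
i_max = np.expand_dims(np.argmax(a, axis=1), axis=1)
np.put_along_axis(a, i_max, -99, axis=1)
assert a[0, 0] == -99 and a[1, 1] == -99
# ---------------------------------------------------------------------------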
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/distutils/ccompiler.py
"""Here we define the exported functions, types, etc... which need to be exported through a global C pointer. Each dictionary contains name -> index pair. Whenever you change one index, you break the ABI (and the ABI version number should be incremented). Whenever you add an item to one of the dict, the API needs to be updated in both setup_common.py and by adding an appropriate entry to cversion.txt (generate the hash via "python cversions.py"). When adding a function, make sure to use the next integer not used as an index (in case you use an existing index or jump, the build will stop and raise an exception, so it should hopefully not get unnoticed). """ from code_generators.genapi import StealRef, NonNull # index, type multiarray_global_vars = { 'NPY_NUMUSERTYPES': (7, 'int'), 'NPY_DEFAULT_ASSIGN_CASTING': (292, 'NPY_CASTING'), } multiarray_scalar_bool_values = { '_PyArrayScalar_BoolValues': (9,) } # index, annotations # please mark functions that have been checked to not need any annotations multiarray_types_api = { 'PyBigArray_Type': (1,), 'PyArray_Type': (2,), # Internally, PyArrayDescr_Type is a PyArray_DTypeMeta, # the following also defines PyArrayDescr_TypeFull (Full appended) 'PyArrayDescr_Type': (3, "PyArray_DTypeMeta"), 'PyArrayFlags_Type': (4,), 'PyArrayIter_Type': (5,), 'PyArrayMultiIter_Type': (6,), 'PyBoolArrType_Type': (8,), 'PyGenericArrType_Type': (10,), 'PyNumberArrType_Type': (11,), 'PyIntegerArrType_Type': (12,), 'PySignedIntegerArrType_Type': (13,), 'PyUnsignedIntegerArrType_Type': (14,), 'PyInexactArrType_Type': (15,), 'PyFloatingArrType_Type': (16,), 'PyComplexFloatingArrType_Type': (17,), 'PyFlexibleArrType_Type': (18,), 'PyCharacterArrType_Type': (19,), 'PyByteArrType_Type': (20,), 'PyShortArrType_Type': (21,), 'PyIntArrType_Type': (22,), 'PyLongArrType_Type': (23,), 'PyLongLongArrType_Type': (24,), 'PyUByteArrType_Type': (25,), 'PyUShortArrType_Type': (26,), 'PyUIntArrType_Type': (27,), 'PyULongArrType_Type': (28,), 'PyULongLongArrType_Type': (29,), 'PyFloatArrType_Type': (30,), 'PyDoubleArrType_Type': (31,), 'PyLongDoubleArrType_Type': (32,), 'PyCFloatArrType_Type': (33,), 'PyCDoubleArrType_Type': (34,), 'PyCLongDoubleArrType_Type': (35,), 'PyObjectArrType_Type': (36,), 'PyStringArrType_Type': (37,), 'PyUnicodeArrType_Type': (38,), 'PyVoidArrType_Type': (39,), # End 1.5 API 'PyTimeIntegerArrType_Type': (214,), 'PyDatetimeArrType_Type': (215,), 'PyTimedeltaArrType_Type': (216,), 'PyHalfArrType_Type': (217,), 'NpyIter_Type': (218,), # End 1.6 API } #define NPY_NUMUSERTYPES (*(int *)PyArray_API[6]) #define PyBoolArrType_Type (*(PyTypeObject *)PyArray_API[7]) #define _PyArrayScalar_BoolValues ((PyBoolScalarObject *)PyArray_API[8]) multiarray_funcs_api = { 'PyArray_GetNDArrayCVersion': (0,), 'PyArray_SetNumericOps': (40,), 'PyArray_GetNumericOps': (41,), 'PyArray_INCREF': (42,), 'PyArray_XDECREF': (43,), 'PyArray_SetStringFunction': (44,), 'PyArray_DescrFromType': (45,), 'PyArray_TypeObjectFromType': (46,), 'PyArray_Zero': (47,), 'PyArray_One': (48,), 'PyArray_CastToType': (49, StealRef(2), NonNull(2)), 'PyArray_CastTo': (50,), 'PyArray_CastAnyTo': (51,), 'PyArray_CanCastSafely': (52,), 'PyArray_CanCastTo': (53,), 'PyArray_ObjectType': (54,), 'PyArray_DescrFromObject': (55,), 'PyArray_ConvertToCommonType': (56,), 'PyArray_DescrFromScalar': (57,), 'PyArray_DescrFromTypeObject': (58,), 'PyArray_Size': (59,), 'PyArray_Scalar': (60,), 'PyArray_FromScalar': (61, StealRef(2)), 'PyArray_ScalarAsCtype': (62,), 'PyArray_CastScalarToCtype': (63,), 'PyArray_CastScalarDirect': 
(64,), 'PyArray_ScalarFromObject': (65,), 'PyArray_GetCastFunc': (66,), 'PyArray_FromDims': (67,), 'PyArray_FromDimsAndDataAndDescr': (68, StealRef(3)), 'PyArray_FromAny': (69, StealRef(2)), 'PyArray_EnsureArray': (70, StealRef(1)), 'PyArray_EnsureAnyArray': (71, StealRef(1)), 'PyArray_FromFile': (72,), 'PyArray_FromString': (73,), 'PyArray_FromBuffer': (74,), 'PyArray_FromIter': (75, StealRef(2)), 'PyArray_Return': (76, StealRef(1)), 'PyArray_GetField': (77, StealRef(2), NonNull(2)), 'PyArray_SetField': (78, StealRef(2), NonNull(2)), 'PyArray_Byteswap': (79,), 'PyArray_Resize': (80,), 'PyArray_MoveInto': (81,), 'PyArray_CopyInto': (82,), 'PyArray_CopyAnyInto': (83,), 'PyArray_CopyObject': (84,), 'PyArray_NewCopy': (85, NonNull(1)), 'PyArray_ToList': (86,), 'PyArray_ToString': (87,), 'PyArray_ToFile': (88,), 'PyArray_Dump': (89,), 'PyArray_Dumps': (90,), 'PyArray_ValidType': (91,), 'PyArray_UpdateFlags': (92,), 'PyArray_New': (93, NonNull(1)), 'PyArray_NewFromDescr': (94, StealRef(2), NonNull([1, 2])), 'PyArray_DescrNew': (95,), 'PyArray_DescrNewFromType': (96,), 'PyArray_GetPriority': (97,), 'PyArray_IterNew': (98,), 'PyArray_MultiIterNew': (99,), 'PyArray_PyIntAsInt': (100,), 'PyArray_PyIntAsIntp': (101,), 'PyArray_Broadcast': (102,), 'PyArray_FillObjectArray': (103,), 'PyArray_FillWithScalar': (104,), 'PyArray_CheckStrides': (105,), 'PyArray_DescrNewByteorder': (106,), 'PyArray_IterAllButAxis': (107,), 'PyArray_CheckFromAny': (108, StealRef(2)), 'PyArray_FromArray': (109, StealRef(2)), 'PyArray_FromInterface': (110,), 'PyArray_FromStructInterface': (111,), 'PyArray_FromArrayAttr': (112,), 'PyArray_ScalarKind': (113,), 'PyArray_CanCoerceScalar': (114,), 'PyArray_NewFlagsObject': (115,), 'PyArray_CanCastScalar': (116,), 'PyArray_CompareUCS4': (117,), 'PyArray_RemoveSmallest': (118,), 'PyArray_ElementStrides': (119,), 'PyArray_Item_INCREF': (120,), 'PyArray_Item_XDECREF': (121,), 'PyArray_FieldNames': (122,), 'PyArray_Transpose': (123,), 'PyArray_TakeFrom': (124,), 'PyArray_PutTo': (125,), 'PyArray_PutMask': (126,), 'PyArray_Repeat': (127,), 'PyArray_Choose': (128,), 'PyArray_Sort': (129,), 'PyArray_ArgSort': (130,), 'PyArray_SearchSorted': (131,), 'PyArray_ArgMax': (132,), 'PyArray_ArgMin': (133,), 'PyArray_Reshape': (134,), 'PyArray_Newshape': (135,), 'PyArray_Squeeze': (136,), 'PyArray_View': (137, StealRef(2)), 'PyArray_SwapAxes': (138,), 'PyArray_Max': (139,), 'PyArray_Min': (140,), 'PyArray_Ptp': (141,), 'PyArray_Mean': (142,), 'PyArray_Trace': (143,), 'PyArray_Diagonal': (144,), 'PyArray_Clip': (145,), 'PyArray_Conjugate': (146,), 'PyArray_Nonzero': (147,), 'PyArray_Std': (148,), 'PyArray_Sum': (149,), 'PyArray_CumSum': (150,), 'PyArray_Prod': (151,), 'PyArray_CumProd': (152,), 'PyArray_All': (153,), 'PyArray_Any': (154,), 'PyArray_Compress': (155,), 'PyArray_Flatten': (156,), 'PyArray_Ravel': (157,), 'PyArray_MultiplyList': (158,), 'PyArray_MultiplyIntList': (159,), 'PyArray_GetPtr': (160,), 'PyArray_CompareLists': (161,), 'PyArray_AsCArray': (162, StealRef(5)), 'PyArray_As1D': (163,), 'PyArray_As2D': (164,), 'PyArray_Free': (165,), 'PyArray_Converter': (166,), 'PyArray_IntpFromSequence': (167,), 'PyArray_Concatenate': (168,), 'PyArray_InnerProduct': (169,), 'PyArray_MatrixProduct': (170,), 'PyArray_CopyAndTranspose': (171,), 'PyArray_Correlate': (172,), 'PyArray_TypestrConvert': (173,), 'PyArray_DescrConverter': (174,), 'PyArray_DescrConverter2': (175,), 'PyArray_IntpConverter': (176,), 'PyArray_BufferConverter': (177,), 'PyArray_AxisConverter': (178,), 'PyArray_BoolConverter': 
(179,), 'PyArray_ByteorderConverter': (180,), 'PyArray_OrderConverter': (181,), 'PyArray_EquivTypes': (182,), 'PyArray_Zeros': (183, StealRef(3)), 'PyArray_Empty': (184, StealRef(3)), 'PyArray_Where': (185,), 'PyArray_Arange': (186,), 'PyArray_ArangeObj': (187,), 'PyArray_SortkindConverter': (188,), 'PyArray_LexSort': (189,), 'PyArray_Round': (190,), 'PyArray_EquivTypenums': (191,), 'PyArray_RegisterDataType': (192,), 'PyArray_RegisterCastFunc': (193,), 'PyArray_RegisterCanCast': (194,), 'PyArray_InitArrFuncs': (195,), 'PyArray_IntTupleFromIntp': (196,), 'PyArray_TypeNumFromName': (197,), 'PyArray_ClipmodeConverter': (198,), 'PyArray_OutputConverter': (199,), 'PyArray_BroadcastToShape': (200,), '_PyArray_SigintHandler': (201,), '_PyArray_GetSigintBuf': (202,), 'PyArray_DescrAlignConverter': (203,), 'PyArray_DescrAlignConverter2': (204,), 'PyArray_SearchsideConverter': (205,), 'PyArray_CheckAxis': (206,), 'PyArray_OverflowMultiplyList': (207,), 'PyArray_CompareString': (208,), 'PyArray_MultiIterFromObjects': (209,), 'PyArray_GetEndianness': (210,), 'PyArray_GetNDArrayCFeatureVersion': (211,), 'PyArray_Correlate2': (212,), 'PyArray_NeighborhoodIterNew': (213,), # End 1.5 API 'PyArray_SetDatetimeParseFunction': (219,), 'PyArray_DatetimeToDatetimeStruct': (220,), 'PyArray_TimedeltaToTimedeltaStruct': (221,), 'PyArray_DatetimeStructToDatetime': (222,), 'PyArray_TimedeltaStructToTimedelta': (223,), # NDIter API 'NpyIter_New': (224,), 'NpyIter_MultiNew': (225,), 'NpyIter_AdvancedNew': (226,), 'NpyIter_Copy': (227,), 'NpyIter_Deallocate': (228,), 'NpyIter_HasDelayedBufAlloc': (229,), 'NpyIter_HasExternalLoop': (230,), 'NpyIter_EnableExternalLoop': (231,), 'NpyIter_GetInnerStrideArray': (232,), 'NpyIter_GetInnerLoopSizePtr': (233,), 'NpyIter_Reset': (234,), 'NpyIter_ResetBasePointers': (235,), 'NpyIter_ResetToIterIndexRange': (236,), 'NpyIter_GetNDim': (237,), 'NpyIter_GetNOp': (238,), 'NpyIter_GetIterNext': (239,), 'NpyIter_GetIterSize': (240,), 'NpyIter_GetIterIndexRange': (241,), 'NpyIter_GetIterIndex': (242,), 'NpyIter_GotoIterIndex': (243,), 'NpyIter_HasMultiIndex': (244,), 'NpyIter_GetShape': (245,), 'NpyIter_GetGetMultiIndex': (246,), 'NpyIter_GotoMultiIndex': (247,), 'NpyIter_RemoveMultiIndex': (248,), 'NpyIter_HasIndex': (249,), 'NpyIter_IsBuffered': (250,), 'NpyIter_IsGrowInner': (251,), 'NpyIter_GetBufferSize': (252,), 'NpyIter_GetIndexPtr': (253,), 'NpyIter_GotoIndex': (254,), 'NpyIter_GetDataPtrArray': (255,), 'NpyIter_GetDescrArray': (256,), 'NpyIter_GetOperandArray': (257,), 'NpyIter_GetIterView': (258,), 'NpyIter_GetReadFlags': (259,), 'NpyIter_GetWriteFlags': (260,), 'NpyIter_DebugPrint': (261,), 'NpyIter_IterationNeedsAPI': (262,), 'NpyIter_GetInnerFixedStrideArray': (263,), 'NpyIter_RemoveAxis': (264,), 'NpyIter_GetAxisStrideArray': (265,), 'NpyIter_RequiresBuffering': (266,), 'NpyIter_GetInitialDataPtrArray': (267,), 'NpyIter_CreateCompatibleStrides': (268,), # 'PyArray_CastingConverter': (269,), 'PyArray_CountNonzero': (270,), 'PyArray_PromoteTypes': (271,), 'PyArray_MinScalarType': (272,), 'PyArray_ResultType': (273,), 'PyArray_CanCastArrayTo': (274,), 'PyArray_CanCastTypeTo': (275,), 'PyArray_EinsteinSum': (276,), 'PyArray_NewLikeArray': (277, StealRef(3), NonNull(1)), 'PyArray_GetArrayParamsFromObject': (278,), 'PyArray_ConvertClipmodeSequence': (279,), 'PyArray_MatrixProduct2': (280,), # End 1.6 API 'NpyIter_IsFirstVisit': (281,), 'PyArray_SetBaseObject': (282, StealRef(2)), 'PyArray_CreateSortedStridePerm': (283,), 'PyArray_RemoveAxesInPlace': (284,), 'PyArray_DebugPrint': 
(285,), 'PyArray_FailUnlessWriteable': (286,), 'PyArray_SetUpdateIfCopyBase': (287, StealRef(2)), 'PyDataMem_NEW': (288,), 'PyDataMem_FREE': (289,), 'PyDataMem_RENEW': (290,), 'PyDataMem_SetEventHook': (291,), 'PyArray_MapIterSwapAxes': (293,), 'PyArray_MapIterArray': (294,), 'PyArray_MapIterNext': (295,), # End 1.7 API 'PyArray_Partition': (296,), 'PyArray_ArgPartition': (297,), 'PyArray_SelectkindConverter': (298,), 'PyDataMem_NEW_ZEROED': (299,), # End 1.8 API # End 1.9 API 'PyArray_CheckAnyScalarExact': (300, NonNull(1)), # End 1.10 API 'PyArray_MapIterArrayCopyIfOverlap': (301,), # End 1.13 API 'PyArray_ResolveWritebackIfCopy': (302,), 'PyArray_SetWritebackIfCopyBase': (303,), # End 1.14 API } ufunc_types_api = { 'PyUFunc_Type': (0,) } ufunc_funcs_api = { 'PyUFunc_FromFuncAndData': (1,), 'PyUFunc_RegisterLoopForType': (2,), 'PyUFunc_GenericFunction': (3,), 'PyUFunc_f_f_As_d_d': (4,), 'PyUFunc_d_d': (5,), 'PyUFunc_f_f': (6,), 'PyUFunc_g_g': (7,), 'PyUFunc_F_F_As_D_D': (8,), 'PyUFunc_F_F': (9,), 'PyUFunc_D_D': (10,), 'PyUFunc_G_G': (11,), 'PyUFunc_O_O': (12,), 'PyUFunc_ff_f_As_dd_d': (13,), 'PyUFunc_ff_f': (14,), 'PyUFunc_dd_d': (15,), 'PyUFunc_gg_g': (16,), 'PyUFunc_FF_F_As_DD_D': (17,), 'PyUFunc_DD_D': (18,), 'PyUFunc_FF_F': (19,), 'PyUFunc_GG_G': (20,), 'PyUFunc_OO_O': (21,), 'PyUFunc_O_O_method': (22,), 'PyUFunc_OO_O_method': (23,), 'PyUFunc_On_Om': (24,), 'PyUFunc_GetPyValues': (25,), 'PyUFunc_checkfperr': (26,), 'PyUFunc_clearfperr': (27,), 'PyUFunc_getfperr': (28,), 'PyUFunc_handlefperr': (29,), 'PyUFunc_ReplaceLoopBySignature': (30,), 'PyUFunc_FromFuncAndDataAndSignature': (31,), 'PyUFunc_SetUsesArraysAsData': (32,), # End 1.5 API 'PyUFunc_e_e': (33,), 'PyUFunc_e_e_As_f_f': (34,), 'PyUFunc_e_e_As_d_d': (35,), 'PyUFunc_ee_e': (36,), 'PyUFunc_ee_e_As_ff_f': (37,), 'PyUFunc_ee_e_As_dd_d': (38,), # End 1.6 API 'PyUFunc_DefaultTypeResolver': (39,), 'PyUFunc_ValidateCasting': (40,), # End 1.7 API 'PyUFunc_RegisterLoopForDescr': (41,), # End 1.8 API 'PyUFunc_FromFuncAndDataAndSignatureAndIdentity': (42,), # End 1.16 API } # List of all the dicts which define the C API # XXX: DO NOT CHANGE THE ORDER OF TUPLES BELOW ! multiarray_api = ( multiarray_global_vars, multiarray_scalar_bool_values, multiarray_types_api, multiarray_funcs_api, ) ufunc_api = ( ufunc_funcs_api, ufunc_types_api ) full_api = multiarray_api + ufunc_api
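# ---------------------------------------------------------------------------
# Illustration (not part of the code generators): a simplified sketch of how a
# name -> index table like the ones above can be turned into the
# "#define name (*(type *)PyArray_API[index])" macros hinted at in the inline
# comment. The real emitters live in numpy's code generators; the helper
# emit_global_defines below is hypothetical and written only for this example.

def emit_global_defines(global_vars, api_array="PyArray_API"):
    """Yield C #define lines for globals exposed through the per-index API
    table. global_vars maps name -> (index, c_type)."""
    for name, (index, c_type) in sorted(global_vars.items(),
                                        key=lambda item: item[1][0]):
        yield "#define %s (*(%s *)%s[%d])" % (name, c_type, api_array, index)


if __name__ == "__main__":
    # A tiny hand-written table in the same shape as multiarray_global_vars;
    # the indices here are illustrative only.
    demo_vars = {
        'NPY_NUMUSERTYPES': (7, 'int'),
        'NPY_DEFAULT_ASSIGN_CASTING': (292, 'NPY_CASTING'),
    }
    for line in emit_global_defines(demo_vars):
        print(line)
    # -> #define NPY_NUMUSERTYPES (*(int *)PyArray_API[7])
    # -> #define NPY_DEFAULT_ASSIGN_CASTING (*(NPY_CASTING *)PyArray_API[292])
# ---------------------------------------------------------------------------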
import numpy as np import functools import sys import pytest from numpy.lib.shape_base import ( apply_along_axis, apply_over_axes, array_split, split, hsplit, dsplit, vsplit, dstack, column_stack, kron, tile, expand_dims, take_along_axis, put_along_axis ) from numpy.testing import ( assert_, assert_equal, assert_array_equal, assert_raises, assert_warns ) IS_64BIT = sys.maxsize > 2**32 def _add_keepdims(func): """ hack in keepdims behavior into a function taking an axis """ @functools.wraps(func) def wrapped(a, axis, **kwargs): res = func(a, axis=axis, **kwargs) if axis is None: axis = 0 # res is now a scalar, so we can insert this anywhere return np.expand_dims(res, axis=axis) return wrapped class TestTakeAlongAxis: def test_argequivalent(self): """ Test it translates from arg<func> to <func> """ from numpy.random import rand a = rand(3, 4, 5) funcs = [ (np.sort, np.argsort, dict()), (_add_keepdims(np.min), _add_keepdims(np.argmin), dict()), (_add_keepdims(np.max), _add_keepdims(np.argmax), dict()), (np.partition, np.argpartition, dict(kth=2)), ] for func, argfunc, kwargs in funcs: for axis in list(range(a.ndim)) + [None]: a_func = func(a, axis=axis, **kwargs) ai_func = argfunc(a, axis=axis, **kwargs) assert_equal(a_func, take_along_axis(a, ai_func, axis=axis)) def test_invalid(self): """ Test it errors when indices has too few dimensions """ a = np.ones((10, 10)) ai = np.ones((10, 2), dtype=np.intp) # sanity check take_along_axis(a, ai, axis=1) # not enough indices assert_raises(ValueError, take_along_axis, a, np.array(1), axis=1) # bool arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(bool), axis=1) # float arrays not allowed assert_raises(IndexError, take_along_axis, a, ai.astype(float), axis=1) # invalid axis assert_raises(np.AxisError, take_along_axis, a, ai, axis=10) def test_empty(self): """ Test everything is ok with empty results, even with inserted dims """ a = np.ones((3, 4, 5)) ai = np.ones((3, 0, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, ai.shape) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.ones((1, 2, 5), dtype=np.intp) actual = take_along_axis(a, ai, axis=1) assert_equal(actual.shape, (3, 2, 5)) class TestPutAlongAxis: def test_replace_max(self): a_base = np.array([[10, 30, 20], [60, 40, 50]]) for axis in list(range(a_base.ndim)) + [None]: # we mutate this in the loop a = a_base.copy() # replace the max with a small value i_max = _add_keepdims(np.argmax)(a, axis=axis) put_along_axis(a, i_max, -99, axis=axis) # find the new minimum, which should max i_min = _add_keepdims(np.argmin)(a, axis=axis) assert_equal(i_min, i_max) def test_broadcast(self): """ Test that non-indexing dimensions are broadcast in both directions """ a = np.ones((3, 4, 1)) ai = np.arange(10, dtype=np.intp).reshape((1, 2, 5)) % 4 put_along_axis(a, ai, 20, axis=1) assert_equal(take_along_axis(a, ai, axis=1), 20) class TestApplyAlongAxis: def test_simple(self): a = np.ones((20, 10), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_simple101(self): a = np.ones((10, 101), 'd') assert_array_equal( apply_along_axis(len, 0, a), len(a)*np.ones(a.shape[1])) def test_3d(self): a = np.arange(27).reshape((3, 3, 3)) assert_array_equal(apply_along_axis(np.sum, 0, a), [[27, 30, 33], [36, 39, 42], [45, 48, 51]]) def test_preserve_subclass(self): def double(row): return row * 2 class MyNDArray(np.ndarray): pass m = 
np.array([[0, 1], [2, 3]]).view(MyNDArray) expected = np.array([[0, 2], [4, 6]]).view(MyNDArray) result = apply_along_axis(double, 0, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) result = apply_along_axis(double, 1, m) assert_(isinstance(result, MyNDArray)) assert_array_equal(result, expected) def test_subclass(self): class MinimalSubclass(np.ndarray): data = 1 def minimal_function(array): return array.data a = np.zeros((6, 3)).view(MinimalSubclass) assert_array_equal( apply_along_axis(minimal_function, 0, a), np.array([1, 1, 1]) ) def test_scalar_array(self, cls=np.ndarray): a = np.ones((6, 3)).view(cls) res = apply_along_axis(np.sum, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) def test_0d_array(self, cls=np.ndarray): def sum_to_0d(x): """ Sum x, returning a 0d array of the same class """ assert_equal(x.ndim, 1) return np.squeeze(np.sum(x, keepdims=True)) a = np.ones((6, 3)).view(cls) res = apply_along_axis(sum_to_0d, 0, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([6, 6, 6]).view(cls)) res = apply_along_axis(sum_to_0d, 1, a) assert_(isinstance(res, cls)) assert_array_equal(res, np.array([3, 3, 3, 3, 3, 3]).view(cls)) def test_axis_insertion(self, cls=np.ndarray): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) return (x[::-1] * x[1:,None]).view(cls) a2d = np.arange(6*3).reshape((6, 3)) # 2d insertion along first axis actual = apply_along_axis(f1to2, 0, a2d) expected = np.stack([ f1to2(a2d[:,i]) for i in range(a2d.shape[1]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 2d insertion along last axis actual = apply_along_axis(f1to2, 1, a2d) expected = np.stack([ f1to2(a2d[i,:]) for i in range(a2d.shape[0]) ], axis=0).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) # 3d insertion along middle axis a3d = np.arange(6*5*3).reshape((6, 5, 3)) actual = apply_along_axis(f1to2, 1, a3d) expected = np.stack([ np.stack([ f1to2(a3d[i,:,j]) for i in range(a3d.shape[0]) ], axis=0) for j in range(a3d.shape[2]) ], axis=-1).view(cls) assert_equal(type(actual), type(expected)) assert_equal(actual, expected) def test_subclass_preservation(self): class MinimalSubclass(np.ndarray): pass self.test_scalar_array(MinimalSubclass) self.test_0d_array(MinimalSubclass) self.test_axis_insertion(MinimalSubclass) def test_axis_insertion_ma(self): def f1to2(x): """produces an asymmetric non-square matrix from x""" assert_equal(x.ndim, 1) res = x[::-1] * x[1:,None] return np.ma.masked_where(res%5==0, res) a = np.arange(6*3).reshape((6, 3)) res = apply_along_axis(f1to2, 0, a) assert_(isinstance(res, np.ma.masked_array)) assert_equal(res.ndim, 3) assert_array_equal(res[:,:,0].mask, f1to2(a[:,0]).mask) assert_array_equal(res[:,:,1].mask, f1to2(a[:,1]).mask) assert_array_equal(res[:,:,2].mask, f1to2(a[:,2]).mask) def test_tuple_func1d(self): def sample_1d(x): return x[1], x[0] res = np.apply_along_axis(sample_1d, 1, np.array([[1, 2], [3, 4]])) assert_array_equal(res, np.array([[2, 1], [4, 3]])) def test_empty(self): # can't apply_along_axis when there's no chance to call the function def never_call(x): assert_(False) # should never be reached a = np.empty((0, 0)) assert_raises(ValueError, np.apply_along_axis, never_call, 0, a) assert_raises(ValueError, np.apply_along_axis, never_call, 1, a) # but it's sometimes ok with some non-zero dimensions def empty_to_1(x): assert_(len(x) == 0) return 1 a = 
np.empty((10, 0)) actual = np.apply_along_axis(empty_to_1, 1, a) assert_equal(actual, np.ones(10)) assert_raises(ValueError, np.apply_along_axis, empty_to_1, 0, a) def test_with_iterable_object(self): # from issue 5248 d = np.array([ [{1, 11}, {2, 22}, {3, 33}], [{4, 44}, {5, 55}, {6, 66}] ]) actual = np.apply_along_axis(lambda a: set.union(*a), 0, d) expected = np.array([{1, 11, 4, 44}, {2, 22, 5, 55}, {3, 33, 6, 66}]) assert_equal(actual, expected) # issue 8642 - assert_equal doesn't detect this! for i in np.ndindex(actual.shape): assert_equal(type(actual[i]), type(expected[i])) class TestApplyOverAxes: def test_simple(self): a = np.arange(24).reshape(2, 3, 4) aoa_a = apply_over_axes(np.sum, a, [0, 2]) assert_array_equal(aoa_a, np.array([[[60], [92], [124]]])) class TestExpandDims: def test_functionality(self): s = (2, 3, 4, 5) a = np.empty(s) for axis in range(-5, 4): b = expand_dims(a, axis) assert_(b.shape[axis] == 1) assert_(np.squeeze(b).shape == s) def test_axis_tuple(self): a = np.empty((3, 3, 3)) assert np.expand_dims(a, axis=(0, 1, 2)).shape == (1, 1, 1, 3, 3, 3) assert np.expand_dims(a, axis=(0, -1, -2)).shape == (1, 3, 3, 3, 1, 1) assert np.expand_dims(a, axis=(0, 3, 5)).shape == (1, 3, 3, 1, 3, 1) assert np.expand_dims(a, axis=(0, -3, -5)).shape == (1, 1, 3, 1, 3, 3) def test_axis_out_of_range(self): s = (2, 3, 4, 5) a = np.empty(s) assert_raises(np.AxisError, expand_dims, a, -6) assert_raises(np.AxisError, expand_dims, a, 5) a = np.empty((3, 3, 3)) assert_raises(np.AxisError, expand_dims, a, (0, -6)) assert_raises(np.AxisError, expand_dims, a, (0, 5)) def test_repeated_axis(self): a = np.empty((3, 3, 3)) assert_raises(ValueError, expand_dims, a, axis=(1, 1)) def test_subclasses(self): a = np.arange(10).reshape((2, 5)) a = np.ma.array(a, mask=a%3 == 0) expanded = np.expand_dims(a, axis=1) assert_(isinstance(expanded, np.ma.MaskedArray)) assert_equal(expanded.shape, (2, 1, 5)) assert_equal(expanded.mask.shape, (2, 1, 5)) class TestArraySplit: def test_integer_0_split(self): a = np.arange(10) assert_raises(ValueError, array_split, a, 0) def test_integer_split(self): a = np.arange(10) res = array_split(a, 1) desired = [np.arange(10)] compare_results(res, desired) res = array_split(a, 2) desired = [np.arange(5), np.arange(5, 10)] compare_results(res, desired) res = array_split(a, 3) desired = [np.arange(4), np.arange(4, 7), np.arange(7, 10)] compare_results(res, desired) res = array_split(a, 4) desired = [np.arange(3), np.arange(3, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 5) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 10)] compare_results(res, desired) res = array_split(a, 6) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 7) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 8) desired = [np.arange(2), np.arange(2, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 9) desired = [np.arange(2), np.arange(2, 3), np.arange(3, 4), np.arange(4, 5), np.arange(5, 6), np.arange(6, 7), np.arange(7, 8), np.arange(8, 9), np.arange(9, 10)] compare_results(res, desired) res = array_split(a, 10) desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3), 
                   np.arange(3, 4), np.arange(4, 5), np.arange(5, 6),
                   np.arange(6, 7), np.arange(7, 8), np.arange(8, 9),
                   np.arange(9, 10)]
        compare_results(res, desired)

        res = array_split(a, 11)
        desired = [np.arange(1), np.arange(1, 2), np.arange(2, 3),
                   np.arange(3, 4), np.arange(4, 5), np.arange(5, 6),
                   np.arange(6, 7), np.arange(7, 8), np.arange(8, 9),
                   np.arange(9, 10), np.array([])]
        compare_results(res, desired)

    def test_integer_split_2D_rows(self):
        a = np.array([np.arange(10), np.arange(10)])
        res = array_split(a, 3, axis=0)
        tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]),
               np.zeros((0, 10))]
        compare_results(res, tgt)
        assert_(a.dtype.type is res[-1].dtype.type)

        # Same thing for manual splits:
        res = array_split(a, [0, 1, 2], axis=0)
        tgt = [np.zeros((0, 10)), np.array([np.arange(10)]),
               np.array([np.arange(10)])]
        compare_results(res, tgt)
        assert_(a.dtype.type is res[-1].dtype.type)

    def test_integer_split_2D_cols(self):
        a = np.array([np.arange(10), np.arange(10)])
        res = array_split(a, 3, axis=-1)
        desired = [np.array([np.arange(4), np.arange(4)]),
                   np.array([np.arange(4, 7), np.arange(4, 7)]),
                   np.array([np.arange(7, 10), np.arange(7, 10)])]
        compare_results(res, desired)

    def test_integer_split_2D_default(self):
        """ This will fail if we change default axis """
        a = np.array([np.arange(10), np.arange(10)])
        res = array_split(a, 3)
        tgt = [np.array([np.arange(10)]), np.array([np.arange(10)]),
               np.zeros((0, 10))]
        compare_results(res, tgt)
        assert_(a.dtype.type is res[-1].dtype.type)
        # perhaps should check higher dimensions

    @pytest.mark.skipif(not IS_64BIT, reason="Needs 64bit platform")
    def test_integer_split_2D_rows_greater_max_int32(self):
        a = np.broadcast_to([0], (1 << 32, 2))
        res = array_split(a, 4)
        chunk = np.broadcast_to([0], (1 << 30, 2))
        tgt = [chunk] * 4
        for i in range(len(tgt)):
            assert_equal(res[i].shape, tgt[i].shape)

    def test_index_split_simple(self):
        a = np.arange(10)
        indices = [1, 5, 7]
        res = array_split(a, indices, axis=-1)
        desired = [np.arange(0, 1), np.arange(1, 5), np.arange(5, 7),
                   np.arange(7, 10)]
        compare_results(res, desired)

    def test_index_split_low_bound(self):
        a = np.arange(10)
        indices = [0, 5, 7]
        res = array_split(a, indices, axis=-1)
        desired = [np.array([]), np.arange(0, 5), np.arange(5, 7),
                   np.arange(7, 10)]
        compare_results(res, desired)

    def test_index_split_high_bound(self):
        a = np.arange(10)
        indices = [0, 5, 7, 10, 12]
        res = array_split(a, indices, axis=-1)
        desired = [np.array([]), np.arange(0, 5), np.arange(5, 7),
                   np.arange(7, 10), np.array([]), np.array([])]
        compare_results(res, desired)


class TestSplit:
    # The split function is essentially the same as array_split,
    # except that it tests if splitting will result in an
    # equal split. Only test for this case.
    def test_equal_split(self):
        a = np.arange(10)
        res = split(a, 2)
        desired = [np.arange(5), np.arange(5, 10)]
        compare_results(res, desired)

    def test_unequal_split(self):
        a = np.arange(10)
        assert_raises(ValueError, split, a, 3)


class TestColumnStack:
    def test_non_iterable(self):
        assert_raises(TypeError, column_stack, 1)

    def test_1D_arrays(self):
        # example from docstring
        a = np.array((1, 2, 3))
        b = np.array((2, 3, 4))
        expected = np.array([[1, 2], [2, 3], [3, 4]])
        actual = np.column_stack((a, b))
        assert_equal(actual, expected)

    def test_2D_arrays(self):
        # same as hstack 2D docstring example
        a = np.array([[1], [2], [3]])
        b = np.array([[2], [3], [4]])
        expected = np.array([[1, 2], [2, 3], [3, 4]])
        actual = np.column_stack((a, b))
        assert_equal(actual, expected)

    def test_generator(self):
        with assert_warns(FutureWarning):
            column_stack((np.arange(3) for _ in range(2)))


class TestDstack:
    def test_non_iterable(self):
        assert_raises(TypeError, dstack, 1)

    def test_0D_array(self):
        a = np.array(1)
        b = np.array(2)
        res = dstack([a, b])
        desired = np.array([[[1, 2]]])
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = np.array([1])
        b = np.array([2])
        res = dstack([a, b])
        desired = np.array([[[1, 2]]])
        assert_array_equal(res, desired)

    def test_2D_array(self):
        a = np.array([[1], [2]])
        b = np.array([[1], [2]])
        res = dstack([a, b])
        desired = np.array([[[1, 1]], [[2, 2, ]]])
        assert_array_equal(res, desired)

    def test_2D_array2(self):
        a = np.array([1, 2])
        b = np.array([1, 2])
        res = dstack([a, b])
        desired = np.array([[[1, 1], [2, 2]]])
        assert_array_equal(res, desired)

    def test_generator(self):
        with assert_warns(FutureWarning):
            dstack((np.arange(3) for _ in range(2)))


# array_split has more comprehensive test of splitting.
# only do simple test on hsplit, vsplit, and dsplit
class TestHsplit:
    """Only testing for integer splits.

    """
    def test_non_iterable(self):
        assert_raises(ValueError, hsplit, 1, 1)

    def test_0D_array(self):
        a = np.array(1)
        try:
            hsplit(a, 2)
            assert_(0)
        except ValueError:
            pass

    def test_1D_array(self):
        a = np.array([1, 2, 3, 4])
        res = hsplit(a, 2)
        desired = [np.array([1, 2]), np.array([3, 4])]
        compare_results(res, desired)

    def test_2D_array(self):
        a = np.array([[1, 2, 3, 4],
                      [1, 2, 3, 4]])
        res = hsplit(a, 2)
        desired = [np.array([[1, 2], [1, 2]]), np.array([[3, 4], [3, 4]])]
        compare_results(res, desired)


class TestVsplit:
    """Only testing for integer splits.

    """
    def test_non_iterable(self):
        assert_raises(ValueError, vsplit, 1, 1)

    def test_0D_array(self):
        a = np.array(1)
        assert_raises(ValueError, vsplit, a, 2)

    def test_1D_array(self):
        a = np.array([1, 2, 3, 4])
        try:
            vsplit(a, 2)
            assert_(0)
        except ValueError:
            pass

    def test_2D_array(self):
        a = np.array([[1, 2, 3, 4],
                      [1, 2, 3, 4]])
        res = vsplit(a, 2)
        desired = [np.array([[1, 2, 3, 4]]), np.array([[1, 2, 3, 4]])]
        compare_results(res, desired)


class TestDsplit:
    # Only testing for integer splits.
    def test_non_iterable(self):
        assert_raises(ValueError, dsplit, 1, 1)

    def test_0D_array(self):
        a = np.array(1)
        assert_raises(ValueError, dsplit, a, 2)

    def test_1D_array(self):
        a = np.array([1, 2, 3, 4])
        assert_raises(ValueError, dsplit, a, 2)

    def test_2D_array(self):
        a = np.array([[1, 2, 3, 4],
                      [1, 2, 3, 4]])
        try:
            dsplit(a, 2)
            assert_(0)
        except ValueError:
            pass

    def test_3D_array(self):
        a = np.array([[[1, 2, 3, 4],
                       [1, 2, 3, 4]],
                      [[1, 2, 3, 4],
                       [1, 2, 3, 4]]])
        res = dsplit(a, 2)
        desired = [np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]]),
                   np.array([[[3, 4], [3, 4]], [[3, 4], [3, 4]]])]
        compare_results(res, desired)


class TestSqueeze:
    def test_basic(self):
        from numpy.random import rand
        a = rand(20, 10, 10, 1, 1)
        b = rand(20, 1, 10, 1, 20)
        c = rand(1, 1, 20, 10)
        assert_array_equal(np.squeeze(a), np.reshape(a, (20, 10, 10)))
        assert_array_equal(np.squeeze(b), np.reshape(b, (20, 10, 20)))
        assert_array_equal(np.squeeze(c), np.reshape(c, (20, 10)))

        # Squeezing to 0-dim should still give an ndarray
        a = [[[1.5]]]
        res = np.squeeze(a)
        assert_equal(res, 1.5)
        assert_equal(res.ndim, 0)
        assert_equal(type(res), np.ndarray)


class TestKron:
    def test_return_type(self):
        class myarray(np.ndarray):
            __array_priority__ = 0.0

        a = np.ones([2, 2])
        ma = myarray(a.shape, a.dtype, a.data)
        assert_equal(type(kron(a, a)), np.ndarray)
        assert_equal(type(kron(ma, ma)), myarray)
        assert_equal(type(kron(a, ma)), np.ndarray)
        assert_equal(type(kron(ma, a)), myarray)


class TestTile:
    def test_basic(self):
        a = np.array([0, 1, 2])
        b = [[1, 2], [3, 4]]
        assert_equal(tile(a, 2), [0, 1, 2, 0, 1, 2])
        assert_equal(tile(a, (2, 2)), [[0, 1, 2, 0, 1, 2], [0, 1, 2, 0, 1, 2]])
        assert_equal(tile(a, (1, 2)), [[0, 1, 2, 0, 1, 2]])
        assert_equal(tile(b, 2), [[1, 2, 1, 2], [3, 4, 3, 4]])
        assert_equal(tile(b, (2, 1)), [[1, 2], [3, 4], [1, 2], [3, 4]])
        assert_equal(tile(b, (2, 2)), [[1, 2, 1, 2], [3, 4, 3, 4],
                                       [1, 2, 1, 2], [3, 4, 3, 4]])

    def test_tile_one_repetition_on_array_gh4679(self):
        a = np.arange(5)
        b = tile(a, 1)
        b += 2
        assert_equal(a, np.arange(5))

    def test_empty(self):
        a = np.array([[[]]])
        b = np.array([[], []])
        c = tile(b, 2).shape
        d = tile(a, (3, 2, 5)).shape
        assert_equal(c, (2, 0))
        assert_equal(d, (3, 2, 0))

    def test_kroncompare(self):
        from numpy.random import randint

        reps = [(2,), (1, 2), (2, 1), (2, 2), (2, 3, 2), (3, 2)]
        shape = [(3,), (2, 3), (3, 4, 3), (3, 2, 3), (4, 3, 2, 4), (2, 2)]
        for s in shape:
            b = randint(0, 10, size=s)
            for r in reps:
                a = np.ones(r, b.dtype)
                large = tile(b, r)
                klarge = kron(a, b)
                assert_equal(large, klarge)


class TestMayShareMemory:
    def test_basic(self):
        d = np.ones((50, 60))
        d2 = np.ones((30, 60, 6))

        assert_(np.may_share_memory(d, d))
        assert_(np.may_share_memory(d, d[::-1]))
        assert_(np.may_share_memory(d, d[::2]))
        assert_(np.may_share_memory(d, d[1:, ::-1]))

        assert_(not np.may_share_memory(d[::-1], d2))
        assert_(not np.may_share_memory(d[::2], d2))
        assert_(not np.may_share_memory(d[1:, ::-1], d2))
        assert_(np.may_share_memory(d2[1:, ::-1], d2))


# Utility
def compare_results(res, desired):
    for i in range(len(desired)):
        assert_array_equal(res[i], desired[i])
mhvk/numpy
numpy/lib/tests/test_shape_base.py
numpy/core/code_generators/numpy_api.py