Dataset preview schema: input (string, 53 to 297k characters), output (string, 604 distinct values), repo_name (string, 376 distinct values), test_path (string, 583 distinct values), code_path (string, 7 to 116 characters).
"""Support for Supla cover - curtains, rollershutters, entry gate etc.""" import logging from pprint import pformat from homeassistant.components.cover import ( ATTR_POSITION, DEVICE_CLASS_GARAGE, CoverEntity, ) from homeassistant.components.supla import ( DOMAIN, SUPLA_COORDINATORS, SUPLA_SERVERS, SuplaChannel, ) _LOGGER = logging.getLogger(__name__) SUPLA_SHUTTER = "CONTROLLINGTHEROLLERSHUTTER" SUPLA_GATE = "CONTROLLINGTHEGATE" async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Supla covers.""" if discovery_info is None: return _LOGGER.debug("Discovery: %s", pformat(discovery_info)) entities = [] for device in discovery_info: device_name = device["function_name"] server_name = device["server_name"] if device_name == SUPLA_SHUTTER: entities.append( SuplaCover( device, hass.data[DOMAIN][SUPLA_SERVERS][server_name], hass.data[DOMAIN][SUPLA_COORDINATORS][server_name], ) ) elif device_name == SUPLA_GATE: entities.append( SuplaGateDoor( device, hass.data[DOMAIN][SUPLA_SERVERS][server_name], hass.data[DOMAIN][SUPLA_COORDINATORS][server_name], ) ) async_add_entities(entities) class SuplaCover(SuplaChannel, CoverEntity): """Representation of a Supla Cover.""" @property def current_cover_position(self): """Return current position of cover. 0 is closed, 100 is open.""" state = self.channel_data.get("state") if state: return 100 - state["shut"] return None async def async_set_cover_position(self, **kwargs): """Move the cover to a specific position.""" await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION)) @property def is_closed(self): """Return if the cover is closed.""" if self.current_cover_position is None: return None return self.current_cover_position == 0 async def async_open_cover(self, **kwargs): """Open the cover.""" await self.async_action("REVEAL") async def async_close_cover(self, **kwargs): """Close the cover.""" await self.async_action("SHUT") async def async_stop_cover(self, **kwargs): """Stop the cover.""" await self.async_action("STOP") class SuplaGateDoor(SuplaChannel, CoverEntity): """Representation of a Supla gate door.""" @property def is_closed(self): """Return if the gate is closed or not.""" state = self.channel_data.get("state") if state and "hi" in state: return state.get("hi") return None async def async_open_cover(self, **kwargs) -> None: """Open the gate.""" if self.is_closed: await self.async_action("OPEN_CLOSE") async def async_close_cover(self, **kwargs) -> None: """Close the gate.""" if not self.is_closed: await self.async_action("OPEN_CLOSE") async def async_stop_cover(self, **kwargs) -> None: """Stop the gate.""" await self.async_action("OPEN_CLOSE") async def async_toggle(self, **kwargs) -> None: """Toggle the gate.""" await self.async_action("OPEN_CLOSE") @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_GARAGE
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/supla/cover.py
"""Offer webhook triggered automation rules.""" from functools import partial from aiohttp import hdrs import voluptuous as vol from homeassistant.const import CONF_PLATFORM, CONF_WEBHOOK_ID from homeassistant.core import HassJob, callback import homeassistant.helpers.config_validation as cv # mypy: allow-untyped-defs DEPENDENCIES = ("webhook",) TRIGGER_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): "webhook", vol.Required(CONF_WEBHOOK_ID): cv.string} ) async def _handle_webhook(job, trigger_id, hass, webhook_id, request): """Handle incoming webhook.""" result = {"platform": "webhook", "webhook_id": webhook_id} if "json" in request.headers.get(hdrs.CONTENT_TYPE, ""): result["json"] = await request.json() else: result["data"] = await request.post() result["query"] = request.query result["description"] = "webhook" result["id"] = trigger_id hass.async_run_hass_job(job, {"trigger": result}) async def async_attach_trigger(hass, config, action, automation_info): """Trigger based on incoming webhooks.""" trigger_id = automation_info.get("trigger_id") if automation_info else None webhook_id = config.get(CONF_WEBHOOK_ID) job = HassJob(action) hass.components.webhook.async_register( automation_info["domain"], automation_info["name"], webhook_id, partial(_handle_webhook, job, trigger_id), ) @callback def unregister(): """Unregister webhook.""" hass.components.webhook.async_unregister(webhook_id) return unregister
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
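As a condensed illustration of the pattern the trait tests above all share (build a State, wrap it in the trait, check SYNC/QUERY attributes, then execute against a mocked service), here is a minimal sketch; the entity ID and the choice of MediaStateTrait are arbitrary examples, not part of the original suite.

async def test_trait_pattern_sketch(hass):
    """Illustrative skeleton of the pattern used by the trait tests above."""
    # 1. Build a State carrying the attributes the trait reads.
    state = State("media_player.example", STATE_PLAYING, {ATTR_SUPPORTED_FEATURES: 0})
    # 2. Wrap it in the trait under test.
    trt = trait.MediaStateTrait(hass, state, BASIC_CONFIG)
    # 3. Check what Google receives at SYNC and QUERY time.
    assert trt.sync_attributes() == {
        "supportActivityState": True,
        "supportPlaybackState": True,
    }
    assert "playbackState" in trt.query_attributes()
    # 4. For executable traits, mock the target service with async_mock_service()
    #    and assert the call payload; MediaStateTrait is only queried above.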
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/webhook/trigger.py
# Copyright 2014-2019 Thomas Schatz, Mathieu Bernard, Roland Thiolliere
#
# This file is part of h5features.
#
# h5features is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# h5features is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with h5features. If not, see <http://www.gnu.org/licenses/>.

"""This package defines a standard to read/write features from/to HDF5 files.

.. note::

   **The functions are not concurrent nor thread-safe** because the HDF5
   library is not concurrent and not always thread-safe. Moreover, they
   aren't even atomic for independent process (because there are several
   independent calls to the file system), so that thread-safety and
   atomicity of operations should be enforced externally when necessary.

"""

from .h5features import read
from .h5features import write
from .h5features import simple_write
from .converter import Converter
from .data import Data
from .reader import Reader
from .writer import Writer
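As a quick orientation to the names exported above, here is a minimal usage sketch; the Writer/Reader call signatures are assumptions inferred from the exports and the package's documented pattern, not guaranteed by this file.

# Minimal sketch (assumed API): write a small feature set and read it back.
import numpy as np
import h5features as h5f

items = ['utt1', 'utt2']                                  # one name per item
times = [np.arange(3) * 0.01, np.arange(5) * 0.01]        # per-frame timestamps
features = [np.random.rand(3, 4), np.random.rand(5, 4)]   # frames x dimensions

data = h5f.Data(items, times, features)
h5f.Writer('features.h5').write(data, 'group')            # assumed Writer usage
read_back = h5f.Reader('features.h5', 'group').read()     # assumed Reader usage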
"""Test of the h5features.index module.""" import h5py import pytest from .aux import generate from .aux.utils import remove from h5features import Data, index class TestIndex: def setup(self): self.filename = 'test.h5' self.teardown() self.h5file = h5py.File(self.filename, 'w') self.group = self.h5file.create_group('group') def teardown(self): remove(self.filename) def test_create(self): index.create_index(self.group, 0.1) assert list(self.group.keys()) == ['index'] # Exception is OSError for h5py<3.0 and ValueError for h5py>=3.0 with pytest.raises(Exception) as err: index.create_index(self.group, 0.1) assert 'name already exists' in str(err.value).lower() def test_write(self): index.create_index(self.group, 0.1) items, times, features = generate.full(100, dim=5, max_frames=10) data = Data(items, times, features) index.write_index(data, self.group, append=False)
bootphon/h5features
test/test_index.py
h5features/__init__.py
"""Config flow for Mobile App.""" import uuid from homeassistant import config_entries from homeassistant.components import person from homeassistant.helpers import entity_registry from .const import ATTR_APP_ID, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, CONF_USER_ID, DOMAIN @config_entries.HANDLERS.register(DOMAIN) class MobileAppFlowHandler(config_entries.ConfigFlow): """Handle a Mobile App config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" } return self.async_abort( reason="install_app", description_placeholders=placeholders ) async def async_step_registration(self, user_input=None): """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. await self.async_set_unique_id( f"{user_input[ATTR_APP_ID]}-{user_input[ATTR_DEVICE_ID]}" ) else: user_input[ATTR_DEVICE_ID] = str(uuid.uuid4()).replace("-", "") # Register device tracker entity and add to person registering app ent_reg = await entity_registry.async_get_registry(self.hass) devt_entry = ent_reg.async_get_or_create( "device_tracker", DOMAIN, user_input[ATTR_DEVICE_ID], suggested_object_id=user_input[ATTR_DEVICE_NAME], ) await person.async_add_user_device_tracker( self.hass, user_input[CONF_USER_ID], devt_entry.entity_id ) return self.async_create_entry( title=user_input[ATTR_DEVICE_NAME], data=user_input )
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
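The numeric supported-features values asserted in test_supported_features above (2047, 219, 195, 3, 13436) are OR-ed SUPPORT_* bitmasks from the vacuum component; a hedged illustration of how the 2047 case would decompose, assuming the usual power-of-two flag values:

from homeassistant.components import vacuum

# Assumed decomposition of the 2047 asserted for the "complete" demo vacuum:
SUPPORT_ALL_BASIC = (
    vacuum.SUPPORT_TURN_ON
    | vacuum.SUPPORT_TURN_OFF
    | vacuum.SUPPORT_PAUSE
    | vacuum.SUPPORT_STOP
    | vacuum.SUPPORT_RETURN_HOME
    | vacuum.SUPPORT_FAN_SPEED
    | vacuum.SUPPORT_BATTERY
    | vacuum.SUPPORT_STATUS
    | vacuum.SUPPORT_SEND_COMMAND
    | vacuum.SUPPORT_LOCATE
    | vacuum.SUPPORT_CLEAN_SPOT
)
# If the flags follow the usual 1, 2, 4, ... 1024 progression, this equals 2047,
# i.e. every feature of the legacy VacuumEntity API.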
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/mobile_app/config_flow.py
"""Config flow for Elexa Guardian integration.""" from aioguardian import Client from aioguardian.errors import GuardianError import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import callback from .const import CONF_UID, DOMAIN, LOGGER # pylint:disable=unused-import DATA_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PORT, default=7777): int} ) UNIQUE_ID = "guardian_{0}" @callback def async_get_pin_from_discovery_hostname(hostname): """Get the device's 4-digit PIN from its zeroconf-discovered hostname.""" return hostname.split(".")[0].split("-")[1] @callback def async_get_pin_from_uid(uid): """Get the device's 4-digit PIN from its UID.""" return uid[-4:] async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ async with Client(data[CONF_IP_ADDRESS]) as client: ping_data = await client.system.ping() return { CONF_UID: ping_data["data"]["uid"], } class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elexa Guardian.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize.""" self.discovery_info = {} async def _async_set_unique_id(self, pin): """Set the config entry's unique ID (based on the device's 4-digit PIN).""" await self.async_set_unique_id(UNIQUE_ID.format(pin)) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle configuration via the UI.""" if user_input is None: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={} ) try: info = await validate_input(self.hass, user_input) except GuardianError as err: LOGGER.error("Error while connecting to unit: %s", err) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={CONF_IP_ADDRESS: "cannot_connect"}, ) pin = async_get_pin_from_uid(info[CONF_UID]) await self._async_set_unique_id(pin) return self.async_create_entry( title=info[CONF_UID], data={CONF_UID: info["uid"], **user_input} ) async def async_step_zeroconf(self, discovery_info): """Handle the configuration via zeroconf.""" if discovery_info is None: return self.async_abort(reason="connection_error") pin = async_get_pin_from_discovery_hostname(discovery_info["hostname"]) await self._async_set_unique_id(pin) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context[CONF_IP_ADDRESS] = discovery_info["host"] if any( discovery_info["host"] == flow["context"][CONF_IP_ADDRESS] for flow in self._async_in_progress() ): return self.async_abort(reason="already_in_progress") self.discovery_info = { CONF_IP_ADDRESS: discovery_info["host"], CONF_PORT: discovery_info["port"], } return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm(self, user_input=None): """Finish the configuration via zeroconf.""" if user_input is None: return self.async_show_form(step_id="zeroconf_confirm") return await self.async_step_user(self.discovery_info)
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/guardian/config_flow.py
"""Config flow for Monoprice 6-Zone Amplifier integration.""" import logging from pymonoprice import get_async_monoprice from serial import SerialException import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import CONF_PORT from .const import ( CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, CONF_SOURCES, ) from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) SOURCES = [ CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, ] OPTIONS_FOR_DATA = {vol.Optional(source): str for source in SOURCES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_PORT): str, **OPTIONS_FOR_DATA}) @core.callback def _sources_from_config(data): sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } return { index: name.strip() for index, name in sources_config.items() if (name is not None and name.strip() != "") } async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ try: await get_async_monoprice(data[CONF_PORT], hass.loop) except SerialException: _LOGGER.error("Error connecting to Monoprice controller") raise CannotConnect sources = _sources_from_config(data) # Return info that you want to store in the config entry. return {CONF_PORT: data[CONF_PORT], CONF_SOURCES: sources} class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Monoprice 6-Zone Amplifier.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(self.hass, user_input) return self.async_create_entry(title=user_input[CONF_PORT], data=info) except CannotConnect: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) @staticmethod @core.callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return MonopriceOptionsFlowHandler(config_entry) @core.callback def _key_for_source(index, source, previous_sources): if str(index) in previous_sources: key = vol.Optional( source, description={"suggested_value": previous_sources[str(index)]} ) else: key = vol.Optional(source) return key class MonopriceOptionsFlowHandler(config_entries.OptionsFlow): """Handle a Monoprice options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry @core.callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: previous = self.config_entry.options[CONF_SOURCES] else: previous = self.config_entry.data[CONF_SOURCES] return previous async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry( title="", data={CONF_SOURCES: _sources_from_config(user_input)} ) previous_sources = self._previous_sources() options = { _key_for_source(idx + 1, source, previous_sources): str for idx, source in enumerate(SOURCES) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options),) class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect."""
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/monoprice/config_flow.py
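A minimal standalone sketch (not part of the integration above; the sample input dictionary and the literal source keys are illustrative assumptions) showing what the _sources_from_config helper does with the names a user types into the Monoprice form:

# Illustrative sketch only: mirrors the _sources_from_config logic shown above.
SOURCES = ["source_1", "source_2", "source_3", "source_4", "source_5", "source_6"]


def sources_from_config(data):
    """Map non-empty source names to their 1-based zone-source index as strings."""
    sources_config = {
        str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES)
    }
    return {
        index: name.strip()
        for index, name in sources_config.items()
        if name is not None and name.strip() != ""
    }


# Example: blank and missing sources are dropped, surrounding whitespace is trimmed.
print(sources_from_config({"source_1": " TV ", "source_2": "", "source_4": "Aux"}))
# -> {'1': 'TV', '4': 'Aux'}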
"""Support for Gogogate2 garage Doors.""" import logging from typing import Callable, List, Optional from gogogate2_api.common import Door, DoorStatus, get_configured_doors, get_door_by_id import voluptuous as vol from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .common import ( GogoGateDataUpdateCoordinator, cover_unique_id, get_data_update_coordinator, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) COVER_SCHEMA = vol.Schema( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: dict, add_entities: Callable, discovery_info=None ) -> None: """Convert old style file configs to new style configs.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) ) async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable[[List[Entity], Optional[bool]], None], ) -> None: """Set up the config entry.""" data_update_coordinator = get_data_update_coordinator(hass, config_entry) async_add_entities( [ Gogogate2Cover(config_entry, data_update_coordinator, door) for door in get_configured_doors(data_update_coordinator.data) ] ) class Gogogate2Cover(CoverEntity): """Cover entity for goggate2.""" def __init__( self, config_entry: ConfigEntry, data_update_coordinator: GogoGateDataUpdateCoordinator, door: Door, ) -> None: """Initialize the object.""" self._config_entry = config_entry self._data_update_coordinator = data_update_coordinator self._door = door self._api = data_update_coordinator.api self._unique_id = cover_unique_id(config_entry, door) self._is_available = True @property def available(self) -> bool: """Return True if entity is available.""" return self._is_available @property def should_poll(self) -> bool: """Return False as the data manager handles dispatching data.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the door.""" return self._door.name @property def is_closed(self): """Return true if cover is closed, else False.""" if self._door.status == DoorStatus.OPENED: return False if self._door.status == DoorStatus.CLOSED: return True return None @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_GARAGE @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE async def async_open_cover(self, **kwargs): """Open the door.""" await self.hass.async_add_executor_job(self._api.open_door, self._door.door_id) async def async_close_cover(self, **kwargs): """Close the door.""" await self.hass.async_add_executor_job(self._api.close_door, self._door.door_id) @property def state_attributes(self): """Return the state attributes.""" attrs = super().state_attributes attrs["door_id"] = self._door.door_id return attrs @callback def async_on_data_updated(self) -> None: """Receive data from data dispatcher.""" if not 
self._data_update_coordinator.last_update_success: self._is_available = False self.async_write_ha_state() return door = get_door_by_id(self._door.door_id, self._data_update_coordinator.data) # Set the state. self._door = door self._is_available = True self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Register update dispatcher.""" self.async_on_remove( self._data_update_coordinator.async_add_listener(self.async_on_data_updated) )
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/gogogate2/cover.py
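A small illustrative sketch of the tri-state is_closed logic used by Gogogate2Cover above; the DoorStatus stand-in enum and its member values here are assumptions for the example, not the gogogate2_api definitions:

# Illustrative sketch only: opened -> False, closed -> True, anything else -> unknown (None).
from enum import Enum


class DoorStatus(Enum):
    """Stand-in for gogogate2_api.common.DoorStatus (values assumed for illustration)."""

    OPENED = "opened"
    CLOSED = "closed"
    UNDEFINED = "undefined"


def is_closed(status):
    """Return False when opened, True when closed, None when the state is unknown."""
    if status == DoorStatus.OPENED:
        return False
    if status == DoorStatus.CLOSED:
        return True
    return None


assert is_closed(DoorStatus.OPENED) is False
assert is_closed(DoorStatus.CLOSED) is True
assert is_closed(DoorStatus.UNDEFINED) is None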
"""Allows to configure a switch using RPi GPIO.""" import logging import voluptuous as vol from homeassistant.components import rpi_gpio from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity _LOGGER = logging.getLogger(__name__) CONF_PULL_MODE = "pull_mode" CONF_PORTS = "ports" CONF_INVERT_LOGIC = "invert_logic" DEFAULT_INVERT_LOGIC = False _SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PORTS): _SWITCHES_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Raspberry PI GPIO devices.""" invert_logic = config.get(CONF_INVERT_LOGIC) switches = [] ports = config.get(CONF_PORTS) for port, name in ports.items(): switches.append(RPiGPIOSwitch(name, port, invert_logic)) add_entities(switches) class RPiGPIOSwitch(ToggleEntity): """Representation of a Raspberry Pi GPIO.""" def __init__(self, name, port, invert_logic): """Initialize the pin.""" self._name = name or DEVICE_DEFAULT_NAME self._port = port self._invert_logic = invert_logic self._state = False rpi_gpio.setup_output(self._port) rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" rpi_gpio.write_output(self._port, 0 if self._invert_logic else 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) self._state = False self.schedule_update_ha_state()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/rpi_gpio/switch.py
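A brief sketch, assuming nothing beyond the switch code above, of how invert_logic flips the level written to the GPIO pin when the switch is turned on or off:

# Illustrative sketch only: the level written by turn_on/turn_off for a given invert_logic.
def gpio_level(turn_on, invert_logic):
    """Return the level written to the pin: 1 is high, 0 is low."""
    if invert_logic:
        return 0 if turn_on else 1
    return 1 if turn_on else 0


# With invert_logic=False, "on" drives the pin high; with invert_logic=True it drives it low.
assert gpio_level(turn_on=True, invert_logic=False) == 1
assert gpio_level(turn_on=True, invert_logic=True) == 0
assert gpio_level(turn_on=False, invert_logic=True) == 1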
"""Support for the OpenWeatherMap (OWM) service.""" from datetime import timedelta import logging from pyowm import OWM from pyowm.exceptions.api_call_error import APICallError import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, PLATFORM_SCHEMA, WeatherEntity, ) from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME, PRESSURE_HPA, PRESSURE_INHG, STATE_UNKNOWN, TEMP_CELSIUS, ) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.util.pressure import convert as convert_pressure _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by OpenWeatherMap" FORECAST_MODE = ["hourly", "daily", "freedaily"] DEFAULT_NAME = "OpenWeatherMap" MIN_TIME_BETWEEN_FORECAST_UPDATES = timedelta(minutes=30) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) CONDITION_CLASSES = { "cloudy": [803, 804], "fog": [701, 741], "hail": [906], "lightning": [210, 211, 212, 221], "lightning-rainy": [200, 201, 202, 230, 231, 232], "partlycloudy": [801, 802], "pouring": [504, 314, 502, 503, 522], "rainy": [300, 301, 302, 310, 311, 312, 313, 500, 501, 520, 521], "snowy": [600, 601, 602, 611, 612, 620, 621, 622], "snowy-rainy": [511, 615, 616], "sunny": [800], "windy": [905, 951, 952, 953, 954, 955, 956, 957], "windy-variant": [958, 959, 960, 961], "exceptional": [711, 721, 731, 751, 761, 762, 771, 900, 901, 962, 903, 904], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the OpenWeatherMap weather platform.""" longitude = config.get(CONF_LONGITUDE, round(hass.config.longitude, 5)) latitude = config.get(CONF_LATITUDE, round(hass.config.latitude, 5)) name = config.get(CONF_NAME) mode = config.get(CONF_MODE) try: owm = OWM(config.get(CONF_API_KEY)) except APICallError: _LOGGER.error("Error while connecting to OpenWeatherMap") return False data = WeatherData(owm, latitude, longitude, mode) add_entities( [OpenWeatherMapWeather(name, data, hass.config.units.temperature_unit, mode)], True, ) class OpenWeatherMapWeather(WeatherEntity): """Implementation of an OpenWeatherMap sensor.""" def __init__(self, name, owm, temperature_unit, mode): """Initialize the sensor.""" self._name = name self._owm = owm self._temperature_unit = temperature_unit self._mode = mode self.data = None self.forecast_data = None @property def name(self): """Return the name of the sensor.""" return self._name @property def condition(self): """Return the current condition.""" try: return [ k for k, v in CONDITION_CLASSES.items() if self.data.get_weather_code() in v ][0] except IndexError: return STATE_UNKNOWN @property def temperature(self): """Return the temperature.""" return self.data.get_temperature("celsius").get("temp") @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def pressure(self): """Return the pressure.""" pressure = self.data.get_pressure().get("press") if self.hass.config.units.name == "imperial": return round(convert_pressure(pressure, PRESSURE_HPA, PRESSURE_INHG), 2) 
return pressure @property def humidity(self): """Return the humidity.""" return self.data.get_humidity() @property def wind_speed(self): """Return the wind speed.""" if self.hass.config.units.name == "imperial": return round(self.data.get_wind().get("speed") * 2.24, 2) return round(self.data.get_wind().get("speed") * 3.6, 2) @property def wind_bearing(self): """Return the wind bearing.""" return self.data.get_wind().get("deg") @property def attribution(self): """Return the attribution.""" return ATTRIBUTION @property def forecast(self): """Return the forecast array.""" data = [] def calc_precipitation(rain, snow): """Calculate the precipitation.""" rain_value = 0 if rain is None else rain snow_value = 0 if snow is None else snow if round(rain_value + snow_value, 1) == 0: return None return round(rain_value + snow_value, 1) if self._mode == "freedaily": weather = self.forecast_data.get_weathers()[::8] else: weather = self.forecast_data.get_weathers() for entry in weather: if self._mode == "daily": data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("day"), ATTR_FORECAST_TEMP_LOW: entry.get_temperature("celsius").get( "night" ), ATTR_FORECAST_PRECIPITATION: calc_precipitation( entry.get_rain().get("all"), entry.get_snow().get("all") ), ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"), ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) else: data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get( "temp" ), ATTR_FORECAST_PRECIPITATION: ( round(entry.get_rain().get("3h"), 1) if entry.get_rain().get("3h") is not None and (round(entry.get_rain().get("3h"), 1) > 0) else None ), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) return data def update(self): """Get the latest data from OWM and updates the states.""" try: self._owm.update() self._owm.update_forecast() except APICallError: _LOGGER.error("Exception when calling OWM web API to update data") return self.data = self._owm.data self.forecast_data = self._owm.forecast_data class WeatherData: """Get the latest data from OpenWeatherMap.""" def __init__(self, owm, latitude, longitude, mode): """Initialize the data object.""" self._mode = mode self.owm = owm self.latitude = latitude self.longitude = longitude self.data = None self.forecast_data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from OpenWeatherMap.""" obs = self.owm.weather_at_coords(self.latitude, self.longitude) if obs is None: _LOGGER.warning("Failed to fetch data from OWM") return self.data = obs.get_weather() @Throttle(MIN_TIME_BETWEEN_FORECAST_UPDATES) def update_forecast(self): """Get the latest forecast from OpenWeatherMap.""" try: if self._mode == "daily": fcd = self.owm.daily_forecast_at_coords( self.latitude, self.longitude, 15 ) else: fcd = self.owm.three_hours_forecast_at_coords( self.latitude, self.longitude ) except APICallError: _LOGGER.error("Exception when calling OWM web API to update forecast") return if fcd is None: _LOGGER.warning("Failed to fetch forecast data from OWM") return self.forecast_data = fcd.get_forecast()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/openweathermap/weather.py
"""Syslog notification service.""" import logging import syslog import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) CONF_FACILITY = "facility" CONF_OPTION = "option" CONF_PRIORITY = "priority" SYSLOG_FACILITY = { "kernel": "LOG_KERN", "user": "LOG_USER", "mail": "LOG_MAIL", "daemon": "LOG_DAEMON", "auth": "LOG_KERN", "LPR": "LOG_LPR", "news": "LOG_NEWS", "uucp": "LOG_UUCP", "cron": "LOG_CRON", "syslog": "LOG_SYSLOG", "local0": "LOG_LOCAL0", "local1": "LOG_LOCAL1", "local2": "LOG_LOCAL2", "local3": "LOG_LOCAL3", "local4": "LOG_LOCAL4", "local5": "LOG_LOCAL5", "local6": "LOG_LOCAL6", "local7": "LOG_LOCAL7", } SYSLOG_OPTION = { "pid": "LOG_PID", "cons": "LOG_CONS", "ndelay": "LOG_NDELAY", "nowait": "LOG_NOWAIT", "perror": "LOG_PERROR", } SYSLOG_PRIORITY = { 5: "LOG_EMERG", 4: "LOG_ALERT", 3: "LOG_CRIT", 2: "LOG_ERR", 1: "LOG_WARNING", 0: "LOG_NOTICE", -1: "LOG_INFO", -2: "LOG_DEBUG", } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_FACILITY, default="syslog"): vol.In(SYSLOG_FACILITY.keys()), vol.Optional(CONF_OPTION, default="pid"): vol.In(SYSLOG_OPTION.keys()), vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()), } ) def get_service(hass, config, discovery_info=None): """Get the syslog notification service.""" facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)]) option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)]) priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)]) return SyslogNotificationService(facility, option, priority) class SyslogNotificationService(BaseNotificationService): """Implement the syslog notification service.""" def __init__(self, facility, option, priority): """Initialize the service.""" self._facility = facility self._option = option self._priority = priority def send_message(self, message="", **kwargs): """Send a message to a user.""" title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) syslog.openlog(title, self._option, self._facility) syslog.syslog(self._priority, message) syslog.closelog()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/syslog/notify.py
"""Config flow to configure the RainMachine component.""" from regenmaschine import login from regenmaschine.errors import RainMachineError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, ) from homeassistant.helpers import aiohttp_client from .const import ( # pylint: disable=unused-import CONF_ZONE_RUN_TIME, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_ZONE_RUN, DOMAIN, ) class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a RainMachine config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.data_schema = vol.Schema( { vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, } ) async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form( step_id="user", data_schema=self.data_schema, errors=errors if errors else {}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return await self._show_form() await self.async_set_unique_id(user_input[CONF_IP_ADDRESS]) self._abort_if_unique_id_configured() websession = aiohttp_client.async_get_clientsession(self.hass) try: await login( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], websession, port=user_input[CONF_PORT], ssl=user_input.get(CONF_SSL, True), ) except RainMachineError: return await self._show_form({CONF_PASSWORD: "invalid_credentials"}) # Unfortunately, RainMachine doesn't provide a way to refresh the # access token without using the IP address and password, so we have to # store it: return self.async_create_entry( title=user_input[CONF_IP_ADDRESS], data={ CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input[CONF_PORT], CONF_SSL: user_input.get(CONF_SSL, True), CONF_SCAN_INTERVAL: user_input.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds() ), CONF_ZONE_RUN_TIME: user_input.get( CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN ), }, )
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/rainmachine/config_flow.py
"""Tracks the latency of a host by sending ICMP echo requests (ping).""" from datetime import timedelta import logging import re import subprocess import sys import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" CONF_PING_COUNT = "count" DEFAULT_NAME = "Ping Binary sensor" DEFAULT_PING_COUNT = 5 DEFAULT_DEVICE_CLASS = "connectivity" SCAN_INTERVAL = timedelta(minutes=5) PING_MATCHER = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)" ) PING_MATCHER_BUSYBOX = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)" ) WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms") PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Ping Binary sensor.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) count = config.get(CONF_PING_COUNT) add_entities([PingBinarySensor(name, PingData(host, count))], True) class PingBinarySensor(BinarySensorEntity): """Representation of a Ping Binary sensor.""" def __init__(self, name, ping): """Initialize the Ping Binary sensor.""" self._name = name self.ping = ping @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return DEFAULT_DEVICE_CLASS @property def is_on(self): """Return true if the binary sensor is on.""" return self.ping.available @property def device_state_attributes(self): """Return the state attributes of the ICMP checo request.""" if self.ping.data is not False: return { ATTR_ROUND_TRIP_TIME_AVG: self.ping.data["avg"], ATTR_ROUND_TRIP_TIME_MAX: self.ping.data["max"], ATTR_ROUND_TRIP_TIME_MDEV: self.ping.data["mdev"], ATTR_ROUND_TRIP_TIME_MIN: self.ping.data["min"], } def update(self): """Get the latest data.""" self.ping.update() class PingData: """The Class for handling the data retrieval.""" def __init__(self, host, count): """Initialize the data object.""" self._ip_address = host self._count = count self.data = {} self.available = False if sys.platform == "win32": self._ping_cmd = [ "ping", "-n", str(self._count), "-w", "1000", self._ip_address, ] else: self._ping_cmd = [ "ping", "-n", "-q", "-c", str(self._count), "-W1", self._ip_address, ] def ping(self): """Send ICMP echo request and return details if success.""" pinger = subprocess.Popen( self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) try: out = pinger.communicate() _LOGGER.debug("Output is %s", str(out)) if sys.platform == "win32": match = WIN32_PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} if "max/" not in str(out): match = PING_MATCHER_BUSYBOX.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} match = PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, 
rtt_avg, rtt_max, rtt_mdev = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev} except (subprocess.CalledProcessError, AttributeError): return False def update(self): """Retrieve the latest details from the host.""" self.data = self.ping() self.available = bool(self.data)
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/ping/binary_sensor.py
"""Support for Volvo heater.""" import logging from homeassistant.helpers.entity import ToggleEntity from . import DATA_KEY, VolvoEntity _LOGGER = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a Volvo switch.""" if discovery_info is None: return async_add_entities([VolvoSwitch(hass.data[DATA_KEY], *discovery_info)]) class VolvoSwitch(VolvoEntity, ToggleEntity): """Representation of a Volvo switch.""" @property def is_on(self): """Return true if switch is on.""" return self.instrument.state async def async_turn_on(self, **kwargs): """Turn the switch on.""" await self.instrument.turn_on() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the switch off.""" await self.instrument.turn_off() self.async_write_ha_state()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/volvooncall/switch.py
"""Support for MySensors lights.""" from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import callback import homeassistant.util.color as color_util from homeassistant.util.color import rgb_hex_to_rgb_list SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { "S_DIMMER": MySensorsLightDimmer, "S_RGB_LIGHT": MySensorsLightRGB, "S_RGBW_LIGHT": MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities, ) class MySensorsLight(mysensors.device.MySensorsEntity, LightEntity): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ( ATTR_BRIGHTNESS not in kwargs or kwargs[ATTR_BRIGHTNESS] == self._brightness or set_req.V_DIMMER not in self._values ): return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == "%02x%02x%02x%02x": if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = color_util.color_RGB_to_hs(*rgb) self._white = white 
self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value(self.node_id, self.child_id, value_type, 0, ack=1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_write_ha_state() @callback def _async_update_light(self): """Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON @callback def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False @callback def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state()
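The hex payload built by _turn_on_rgb_and_w and parsed back in _async_update_rgb_or_w is simply each channel value rendered as two hex digits. The standalone sketch below is illustrative only (plain Python, not part of the platform file; pack_rgbw and unpack_rgbw are hypothetical helpers) and shows the round trip for an RGBW value.

# Illustrative sketch of the RGB(W) hex payload format used above; not part of the platform code.
def pack_rgbw(rgb, white=None):
    """Pack channel values (0-255) into the hex string sent to the MySensors gateway."""
    channels = list(rgb) + ([white] if white is not None else [])
    template = "%02x" * len(channels)  # e.g. "%02x%02x%02x%02x" for RGBW
    return template % tuple(channels)

def unpack_rgbw(hex_color):
    """Split a hex payload back into per-channel integers (mirrors rgb_hex_to_rgb_list)."""
    return [int(hex_color[i : i + 2], 16) for i in range(0, len(hex_color), 2)]

assert pack_rgbw((255, 128, 0), white=64) == "ff800040"
assert unpack_rgbw("ff800040") == [255, 128, 0, 64]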
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/mysensors/light.py
"""An abstract class common to all Bond entities.""" from abc import abstractmethod from asyncio import TimeoutError as AsyncIOTimeoutError import logging from typing import Any, Dict, Optional from aiohttp import ClientError from homeassistant.const import ATTR_NAME from homeassistant.helpers.entity import Entity from .const import DOMAIN from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) class BondEntity(Entity): """Generic Bond entity encapsulating common features of any Bond controlled device.""" def __init__(self, hub: BondHub, device: BondDevice): """Initialize entity with API and device info.""" self._hub = hub self._device = device self._available = True @property def unique_id(self) -> Optional[str]: """Get unique ID for the entity.""" return self._device.device_id @property def name(self) -> Optional[str]: """Get entity name.""" return self._device.name @property def device_info(self) -> Optional[Dict[str, Any]]: """Get a an HA device representing this Bond controlled device.""" return { ATTR_NAME: self.name, "identifiers": {(DOMAIN, self._device.device_id)}, "via_device": (DOMAIN, self._hub.bond_id), } @property def assumed_state(self) -> bool: """Let HA know this entity relies on an assumed state tracked by Bond.""" return True @property def available(self) -> bool: """Report availability of this entity based on last API call results.""" return self._available async def async_update(self): """Fetch assumed state of the cover from the hub using API.""" try: state: dict = await self._hub.bond.device_state(self._device.device_id) except (ClientError, AsyncIOTimeoutError, OSError) as error: if self._available: _LOGGER.warning( "Entity %s has become unavailable", self.entity_id, exc_info=error ) self._available = False else: if not self._available: _LOGGER.info("Entity %s has come back", self.entity_id) self._available = True self._apply_state(state) @abstractmethod def _apply_state(self, state: dict): raise NotImplementedError
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/bond/entity.py
"""Support for MyChevy.""" from datetime import timedelta import logging import threading import time import mychevy.mychevy as mc import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import config_validation as cv, discovery from homeassistant.util import Throttle DOMAIN = "mychevy" UPDATE_TOPIC = DOMAIN ERROR_TOPIC = f"{DOMAIN}_error" MYCHEVY_SUCCESS = "success" MYCHEVY_ERROR = "error" NOTIFICATION_ID = "mychevy_website_notification" NOTIFICATION_TITLE = "MyChevy website status" _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) ERROR_SLEEP_TIME = timedelta(minutes=30) CONF_COUNTRY = "country" DEFAULT_COUNTRY = "us" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_COUNTRY, default=DEFAULT_COUNTRY): vol.All( cv.string, vol.In(["us", "ca"]) ), } ) }, extra=vol.ALLOW_EXTRA, ) class EVSensorConfig: """The EV sensor configuration.""" def __init__( self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None ): """Create new sensor configuration.""" self.name = name self.attr = attr self.extra_attrs = extra_attrs or [] self.unit_of_measurement = unit_of_measurement self.icon = icon class EVBinarySensorConfig: """The EV binary sensor configuration.""" def __init__(self, name, attr, device_class=None): """Create new binary sensor configuration.""" self.name = name self.attr = attr self.device_class = device_class def setup(hass, base_config): """Set up the mychevy component.""" config = base_config.get(DOMAIN) email = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) country = config.get(CONF_COUNTRY) hass.data[DOMAIN] = MyChevyHub( mc.MyChevy(email, password, country), hass, base_config ) hass.data[DOMAIN].start() return True class MyChevyHub(threading.Thread): """MyChevy Hub. Connecting to the mychevy website is done through a selenium webscraping process. That can only run synchronously. In order to prevent blocking of other parts of Home Assistant the architecture launches a polling loop in a thread. When new data is received, sensors are updated, and hass is signaled that there are updates. Sensors are not created until the first update, which will be 60 - 120 seconds after the platform starts. """ def __init__(self, client, hass, hass_config): """Initialize MyChevy Hub.""" super().__init__() self._client = client self.hass = hass self.hass_config = hass_config self.cars = [] self.status = None self.ready = False @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update sensors from mychevy website. 
This is a synchronous polling call that takes a very long time (like 2 to 3 minutes long time) """ self._client.login() self._client.get_cars() self.cars = self._client.cars if self.ready is not True: discovery.load_platform(self.hass, "sensor", DOMAIN, {}, self.hass_config) discovery.load_platform( self.hass, "binary_sensor", DOMAIN, {}, self.hass_config ) self.ready = True self.cars = self._client.update_cars() def get_car(self, vid): """Compatibility to work with one car.""" if self.cars: for car in self.cars: if car.vid == vid: return car return None def run(self): """Thread run loop.""" # We add the status device first outside of the loop # And then busy wait on threads while True: try: _LOGGER.info("Starting mychevy loop") self.update() self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error updating mychevy data. " "This probably means the OnStar link is down again" ) self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC) time.sleep(ERROR_SLEEP_TIME.seconds)
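The hub thread pushes results to the rest of Home Assistant purely through dispatcher signals. The sketch below shows how a consumer could listen for those signals; async_dispatcher_connect is the standard Home Assistant helper, while the callbacks and example_subscribe are placeholders for illustration, not the actual mychevy sensor platform.

# Hedged sketch: subscribing to the signals sent from MyChevyHub.run() above.
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

@callback
def _handle_update():
    """Placeholder reaction to new car data pushed by the hub thread."""
    _LOGGER.debug("mychevy hub published new car data")

@callback
def _handle_error():
    """Placeholder reaction to the hub signalling a scraping failure."""
    _LOGGER.debug("mychevy hub reported an error")

def example_subscribe(hass):
    """Wire the placeholder callbacks to the topics the hub dispatches on."""
    async_dispatcher_connect(hass, UPDATE_TOPIC, _handle_update)
    async_dispatcher_connect(hass, ERROR_TOPIC, _handle_error)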
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/mychevy/__init__.py
"""The BleBox devices integration.""" import asyncio import logging from blebox_uniapi.error import Error from blebox_uniapi.products import Products from blebox_uniapi.session import ApiHost from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT _LOGGER = logging.getLogger(__name__) PLATFORMS = ["cover", "sensor", "switch", "air_quality", "light", "climate"] PARALLEL_UPDATES = 0 async def async_setup(hass: HomeAssistant, config: dict): """Set up the BleBox devices component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up BleBox devices from a config entry.""" websession = async_get_clientsession(hass) host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] timeout = DEFAULT_SETUP_TIMEOUT api_host = ApiHost(host, port, timeout, websession, hass.loop) try: product = await Products.async_from_host(api_host) except Error as ex: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex domain = hass.data.setdefault(DOMAIN, {}) domain_entry = domain.setdefault(entry.entry_id, {}) product = domain_entry.setdefault(PRODUCT, product) for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok @callback def create_blebox_entities( hass, config_entry, async_add_entities, entity_klass, entity_type ): """Create entities from a BleBox product's features.""" product = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [] if entity_type in product.features: for feature in product.features[entity_type]: entities.append(entity_klass(feature)) async_add_entities(entities, True) class BleBoxEntity(Entity): """Implements a common class for entities representing a BleBox feature.""" def __init__(self, feature): """Initialize a BleBox entity.""" self._feature = feature @property def name(self): """Return the internal entity name.""" return self._feature.full_name @property def unique_id(self): """Return a unique id.""" return self._feature.unique_id async def async_update(self): """Update the entity state.""" try: await self._feature.async_update() except Error as ex: _LOGGER.error("Updating '%s' failed: %s", self.name, ex) @property def device_info(self): """Return device information for this entity.""" product = self._feature.product return { "identifiers": {(DOMAIN, product.unique_id)}, "name": product.name, "manufacturer": product.brand, "model": product.model, "sw_version": product.firmware_version, }
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
repo_name: pschmitt/home-assistant
test_path: tests/components/demo/test_vacuum.py
code_path: homeassistant/components/blebox/__init__.py
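A minimal, hypothetical sketch of the service-mocking pattern the test file above relies on: async_mock_service from Home Assistant's tests.common registers a stub service and returns the list of captured calls, so a test can assert on the payload without running a real integration. The test name and the vacuum.demo entity id are invented for illustration, and the hass fixture is assumed to come from Home Assistant's pytest test harness.

# Illustrative only: async_mock_service captures calls so the test can assert
# on the domain, service and data of each call without a real backend.
from homeassistant.components.vacuum import DOMAIN, SERVICE_SET_FAN_SPEED
from tests.common import async_mock_service


async def test_set_fan_speed_call_is_captured(hass):  # hass fixture from HA's pytest plugin
    calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED)

    await hass.services.async_call(
        DOMAIN,
        SERVICE_SET_FAN_SPEED,
        {"entity_id": "vacuum.demo", "fan_speed": "max"},  # hypothetical entity id
        blocking=True,
    )

    assert len(calls) == 1
    assert calls[0].data["fan_speed"] == "max"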
"""Support for Z-Wave.""" # pylint: disable=import-outside-toplevel import asyncio import copy from importlib import import_module import logging from pprint import pprint import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import CoreState, callback from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_device_registry, ) from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_registry import ( async_get_registry as async_get_entity_registry, ) from homeassistant.helpers.entity_values import EntityValues from homeassistant.helpers.event import async_track_time_change from homeassistant.util import convert import homeassistant.util.dt as dt_util from . import config_flow # noqa: F401 pylint: disable=unused-import from . import const, websocket_api as wsapi, workaround from .const import ( CONF_AUTOHEAL, CONF_CONFIG_PATH, CONF_DEBUG, CONF_NETWORK_KEY, CONF_POLLING_INTERVAL, CONF_USB_STICK_PATH, DATA_DEVICES, DATA_ENTITY_VALUES, DATA_NETWORK, DATA_ZWAVE_CONFIG, DEFAULT_CONF_AUTOHEAL, DEFAULT_CONF_USB_STICK_PATH, DEFAULT_DEBUG, DEFAULT_POLLING_INTERVAL, DOMAIN, ) from .discovery_schemas import DISCOVERY_SCHEMAS from .node_entity import ZWaveBaseEntity, ZWaveNodeEntity from .util import ( check_has_unique_id, check_node_schema, check_value_schema, is_node_parsed, node_device_id_and_name, node_name, ) _LOGGER = logging.getLogger(__name__) CLASS_ID = "class_id" ATTR_POWER = "power_consumption" CONF_POLLING_INTENSITY = "polling_intensity" CONF_IGNORED = "ignored" CONF_INVERT_OPENCLOSE_BUTTONS = "invert_openclose_buttons" CONF_INVERT_PERCENT = "invert_percent" CONF_REFRESH_VALUE = "refresh_value" CONF_REFRESH_DELAY = "delay" CONF_DEVICE_CONFIG = "device_config" CONF_DEVICE_CONFIG_GLOB = "device_config_glob" CONF_DEVICE_CONFIG_DOMAIN = "device_config_domain" DEFAULT_CONF_IGNORED = False DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS = False DEFAULT_CONF_INVERT_PERCENT = False DEFAULT_CONF_REFRESH_VALUE = False DEFAULT_CONF_REFRESH_DELAY = 5 SUPPORTED_PLATFORMS = [ "binary_sensor", "climate", "cover", "fan", "lock", "light", "sensor", "switch", ] RENAME_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) RENAME_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) SET_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), cv.string), vol.Optional(const.ATTR_CONFIG_SIZE, default=2): vol.Coerce(int), } ) SET_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Any(vol.Coerce(int), cv.string), vol.Required(const.ATTR_CONFIG_VALUE): 
vol.Any(vol.Coerce(int), cv.string), } ) REFRESH_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), } ) SET_POLL_INTENSITY_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_POLL_INTENSITY): vol.Coerce(int), } ) PRINT_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), } ) NODE_SERVICE_SCHEMA = vol.Schema({vol.Required(const.ATTR_NODE_ID): vol.Coerce(int)}) REFRESH_ENTITY_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) RESET_NODE_METERS_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=1): vol.Coerce(int), } ) CHANGE_ASSOCIATION_SCHEMA = vol.Schema( { vol.Required(const.ATTR_ASSOCIATION): cv.string, vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_GROUP): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int), } ) SET_WAKEUP_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.All( vol.Coerce(int), cv.positive_int ), } ) HEAL_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_RETURN_ROUTES, default=False): cv.boolean, } ) TEST_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_MESSAGES, default=1): cv.positive_int, } ) DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema( { vol.Optional(CONF_POLLING_INTENSITY): cv.positive_int, vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean, vol.Optional( CONF_INVERT_OPENCLOSE_BUTTONS, default=DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS ): cv.boolean, vol.Optional( CONF_INVERT_PERCENT, default=DEFAULT_CONF_INVERT_PERCENT ): cv.boolean, vol.Optional( CONF_REFRESH_VALUE, default=DEFAULT_CONF_REFRESH_VALUE ): cv.boolean, vol.Optional( CONF_REFRESH_DELAY, default=DEFAULT_CONF_REFRESH_DELAY ): cv.positive_int, } ) SIGNAL_REFRESH_ENTITY_FORMAT = "zwave_refresh_entity_{}" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_AUTOHEAL, default=DEFAULT_CONF_AUTOHEAL): cv.boolean, vol.Optional(CONF_CONFIG_PATH): cv.string, vol.Optional(CONF_NETWORK_KEY): vol.All( cv.string, vol.Match(r"(0x\w\w,\s?){15}0x\w\w") ), vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema( {cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean, vol.Optional( CONF_POLLING_INTERVAL, default=DEFAULT_POLLING_INTERVAL ): cv.positive_int, vol.Optional(CONF_USB_STICK_PATH): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) def _obj_to_dict(obj): """Convert an object into a hash for debug.""" return { key: getattr(obj, key) for key in dir(obj) if key[0] != "_" and not callable(getattr(obj, key)) } def _value_name(value): """Return the name of the value.""" return f"{node_name(value.node)} {value.label}".strip() def nice_print_node(node): """Print a nice formatted node to the output (debug method).""" node_dict = _obj_to_dict(node) node_dict["values"] = { value_id: _obj_to_dict(value) for 
value_id, value in node.values.items() } _LOGGER.info("FOUND NODE %s \n%s", node.product_name, node_dict) def get_config_value(node, value_index, tries=5): """Return the current configuration value for a specific index.""" try: for value in node.values.values(): if ( value.command_class == const.COMMAND_CLASS_CONFIGURATION and value.index == value_index ): return value.data except RuntimeError: # If we get a runtime error the dict has changed while # we was looking for a value, just do it again return ( None if tries <= 0 else get_config_value(node, value_index, tries=tries - 1) ) return None async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Z-Wave platform (generic part).""" if discovery_info is None or DATA_NETWORK not in hass.data: return False device = hass.data[DATA_DEVICES].get(discovery_info[const.DISCOVERY_DEVICE]) if device is None: return False async_add_entities([device]) return True async def async_setup(hass, config): """Set up Z-Wave components.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_ZWAVE_CONFIG] = conf if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={ CONF_USB_STICK_PATH: conf.get( CONF_USB_STICK_PATH, DEFAULT_CONF_USB_STICK_PATH ), CONF_NETWORK_KEY: conf.get(CONF_NETWORK_KEY), }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up Z-Wave from a config entry. Will automatically load components to support devices found on the network. """ from pydispatch import dispatcher # pylint: disable=import-error from openzwave.option import ZWaveOption from openzwave.network import ZWaveNetwork from openzwave.group import ZWaveGroup # Merge config entry and yaml config config = config_entry.data if DATA_ZWAVE_CONFIG in hass.data: config = {**config, **hass.data[DATA_ZWAVE_CONFIG]} # Update hass.data with merged config so we can access it elsewhere hass.data[DATA_ZWAVE_CONFIG] = config # Load configuration use_debug = config.get(CONF_DEBUG, DEFAULT_DEBUG) autoheal = config.get(CONF_AUTOHEAL, DEFAULT_CONF_AUTOHEAL) device_config = EntityValues( config.get(CONF_DEVICE_CONFIG), config.get(CONF_DEVICE_CONFIG_DOMAIN), config.get(CONF_DEVICE_CONFIG_GLOB), ) usb_path = config[CONF_USB_STICK_PATH] _LOGGER.info("Z-Wave USB path is %s", usb_path) # Setup options options = ZWaveOption( usb_path, user_path=hass.config.config_dir, config_path=config.get(CONF_CONFIG_PATH), ) options.set_console_output(use_debug) if config.get(CONF_NETWORK_KEY): options.addOption("NetworkKey", config[CONF_NETWORK_KEY]) await hass.async_add_executor_job(options.lock) network = hass.data[DATA_NETWORK] = ZWaveNetwork(options, autostart=False) hass.data[DATA_DEVICES] = {} hass.data[DATA_ENTITY_VALUES] = [] registry = await async_get_entity_registry(hass) wsapi.async_load_websocket_api(hass) if use_debug: # pragma: no cover def log_all(signal, value=None): """Log all the signals.""" print("") print("SIGNAL *****", signal) if value and signal in ( ZWaveNetwork.SIGNAL_VALUE_CHANGED, ZWaveNetwork.SIGNAL_VALUE_ADDED, ZWaveNetwork.SIGNAL_SCENE_EVENT, ZWaveNetwork.SIGNAL_NODE_EVENT, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, ): pprint(_obj_to_dict(value)) print("") dispatcher.connect(log_all, weak=False) def value_added(node, value): """Handle new added value to a node on the network.""" # Check if this value 
should be tracked by an existing entity for values in hass.data[DATA_ENTITY_VALUES]: values.check_value(value) for schema in DISCOVERY_SCHEMAS: if not check_node_schema(node, schema): continue if not check_value_schema( value, schema[const.DISC_VALUES][const.DISC_PRIMARY] ): continue values = ZWaveDeviceEntityValues( hass, schema, value, config, device_config, registry ) # We create a new list and update the reference here so that # the list can be safely iterated over in the main thread new_values = hass.data[DATA_ENTITY_VALUES] + [values] hass.data[DATA_ENTITY_VALUES] = new_values platform = EntityPlatform( hass=hass, logger=_LOGGER, domain=DOMAIN, platform_name=DOMAIN, platform=None, scan_interval=DEFAULT_SCAN_INTERVAL, entity_namespace=None, ) platform.config_entry = config_entry def node_added(node): """Handle a new node on the network.""" entity = ZWaveNodeEntity(node, network) async def _add_node_to_component(): if hass.data[DATA_DEVICES].get(entity.unique_id): return name = node_name(node) generated_id = generate_entity_id(DOMAIN + ".{}", name, []) node_config = device_config.get(generated_id) if node_config.get(CONF_IGNORED): _LOGGER.info( "Ignoring node entity %s due to device settings", generated_id ) return hass.data[DATA_DEVICES][entity.unique_id] = entity await platform.async_add_entities([entity]) if entity.unique_id: hass.async_add_job(_add_node_to_component()) return @callback def _on_ready(sec): _LOGGER.info("Z-Wave node %d ready after %d seconds", entity.node_id, sec) hass.async_add_job(_add_node_to_component) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave node %d not ready after %d seconds, continuing anyway", entity.node_id, sec, ) hass.async_add_job(_add_node_to_component) hass.add_job(check_has_unique_id, entity, _on_ready, _on_timeout) def node_removed(node): node_id = node.node_id node_key = f"node-{node_id}" for key in list(hass.data[DATA_DEVICES]): if key is None: continue if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] _LOGGER.debug( "Removing Entity - value: %s - entity_id: %s", key, entity.entity_id ) hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][key] entity = hass.data[DATA_DEVICES][node_key] hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][node_key] hass.add_job(_remove_device(node)) async def _remove_device(node): dev_reg = await async_get_device_registry(hass) identifier, name = node_device_id_and_name(node) device = dev_reg.async_get_device(identifiers={identifier}, connections=set()) if device is not None: _LOGGER.debug("Removing Device - %s - %s", device.id, name) dev_reg.async_remove_device(device.id) def network_ready(): """Handle the query of all awake nodes.""" _LOGGER.info( "Z-Wave network is ready for use. All awake nodes " "have been queried. Sleeping nodes will be " "queried when they awake" ) hass.bus.fire(const.EVENT_NETWORK_READY) def network_complete(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. All nodes on the network have been queried" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE) def network_complete_some_dead(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. 
All nodes on the network " "have been queried, but some nodes are marked dead" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE_SOME_DEAD) dispatcher.connect(value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED, weak=False) dispatcher.connect(node_added, ZWaveNetwork.SIGNAL_NODE_ADDED, weak=False) dispatcher.connect(node_removed, ZWaveNetwork.SIGNAL_NODE_REMOVED, weak=False) dispatcher.connect( network_ready, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete_some_dead, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, weak=False, ) def add_node(service): """Switch into inclusion mode.""" _LOGGER.info("Z-Wave add_node have been initialized") network.controller.add_node() def add_node_secure(service): """Switch into secure inclusion mode.""" _LOGGER.info("Z-Wave add_node_secure have been initialized") network.controller.add_node(True) def remove_node(service): """Switch into exclusion mode.""" _LOGGER.info("Z-Wave remove_node have been initialized") network.controller.remove_node() def cancel_command(service): """Cancel a running controller command.""" _LOGGER.info("Cancel running Z-Wave command") network.controller.cancel_command() def heal_network(service): """Heal the network.""" _LOGGER.info("Z-Wave heal running") network.heal() def soft_reset(service): """Soft reset the controller.""" _LOGGER.info("Z-Wave soft_reset have been initialized") network.controller.soft_reset() def test_network(service): """Test the network by sending commands to all the nodes.""" _LOGGER.info("Z-Wave test_network have been initialized") network.test() def stop_network(_service_or_event): """Stop Z-Wave network.""" _LOGGER.info("Stopping Z-Wave network") network.stop() if hass.state == CoreState.running: hass.bus.fire(const.EVENT_NETWORK_STOP) async def rename_node(service): """Rename a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] name = service.data.get(const.ATTR_NAME) node.name = name _LOGGER.info("Renamed Z-Wave node %d to %s", node_id, name) update_ids = service.data.get(const.ATTR_UPDATE_IDS) # We want to rename the device, the node entity, # and all the contained entities node_key = f"node-{node_id}" entity = hass.data[DATA_DEVICES][node_key] await entity.node_renamed(update_ids) for key in list(hass.data[DATA_DEVICES]): if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] await entity.value_renamed(update_ids) async def rename_value(service): """Rename a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] name = service.data.get(const.ATTR_NAME) value.label = name _LOGGER.info( "Renamed Z-Wave value (Node %d Value %d) to %s", node_id, value_id, name ) update_ids = service.data.get(const.ATTR_UPDATE_IDS) value_key = f"{node_id}-{value_id}" entity = hass.data[DATA_DEVICES][value_key] await entity.value_renamed(update_ids) def set_poll_intensity(service): """Set the polling intensity of a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] intensity = service.data.get(const.ATTR_POLL_INTENSITY) if intensity == 0: if value.disable_poll(): _LOGGER.info("Polling disabled (Node %d Value %d)", node_id, value_id) return _LOGGER.info( "Polling disabled failed (Node %d Value %d)", node_id, 
value_id ) else: if value.enable_poll(intensity): _LOGGER.info( "Set polling intensity (Node %d Value %d) to %s", node_id, value_id, intensity, ) return _LOGGER.info( "Set polling intensity failed (Node %d Value %d)", node_id, value_id ) def remove_failed_node(service): """Remove failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to remove zwave node %d", node_id) network.controller.remove_failed_node(node_id) def replace_failed_node(service): """Replace failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to replace zwave node %d", node_id) network.controller.replace_failed_node(node_id) def set_config_parameter(service): """Set a config parameter to a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) selection = service.data.get(const.ATTR_CONFIG_VALUE) size = service.data.get(const.ATTR_CONFIG_SIZE) for value in node.get_values( class_id=const.COMMAND_CLASS_CONFIGURATION ).values(): if value.index != param: continue if value.type == const.TYPE_BOOL: value.data = int(selection == "True") _LOGGER.info( "Setting configuration parameter %s on Node %s with bool selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_LIST: value.data = str(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with list selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_BUTTON: network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Setting configuration parameter %s on Node %s " "with button selection %s", param, node_id, selection, ) return value.data = int(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) return node.set_config_param(param, selection, size) _LOGGER.info( "Setting unknown configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) def refresh_node_value(service): """Refresh the specified value from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] node.values[value_id].refresh() _LOGGER.info("Node %s value %s refreshed", node_id, value_id) def set_node_value(service): """Set the specified value on a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) value = service.data.get(const.ATTR_CONFIG_VALUE) node = network.nodes[node_id] node.values[value_id].data = value _LOGGER.info("Node %s value %s set to %s", node_id, value_id, value) def print_config_parameter(service): """Print a config parameter from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) _LOGGER.info( "Config parameter %s on Node %s: %s", param, node_id, get_config_value(node, param), ) def print_node(service): """Print all information about z-wave node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] nice_print_node(node) def set_wakeup(service): """Set wake-up interval of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] value = service.data.get(const.ATTR_CONFIG_VALUE) if node.can_wake_up(): for value_id in node.get_values(class_id=const.COMMAND_CLASS_WAKE_UP): node.values[value_id].data = value _LOGGER.info("Node %s wake-up set to %d", node_id, value) else: 
_LOGGER.info("Node %s is not wakeable", node_id) def change_association(service): """Change an association in the zwave network.""" association_type = service.data.get(const.ATTR_ASSOCIATION) node_id = service.data.get(const.ATTR_NODE_ID) target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID) group = service.data.get(const.ATTR_GROUP) instance = service.data.get(const.ATTR_INSTANCE) node = ZWaveGroup(group, network, node_id) if association_type == "add": node.add_association(target_node_id, instance) _LOGGER.info( "Adding association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) if association_type == "remove": node.remove_association(target_node_id, instance) _LOGGER.info( "Removing association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) async def async_refresh_entity(service): """Refresh values that specific entity depends on.""" entity_id = service.data.get(ATTR_ENTITY_ID) async_dispatcher_send(hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(entity_id)) def refresh_node(service): """Refresh all node info.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] node.refresh_info() def reset_node_meters(service): """Reset meter counters of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) instance = service.data.get(const.ATTR_INSTANCE) node = network.nodes[node_id] for value in node.get_values(class_id=const.COMMAND_CLASS_METER).values(): if value.index != const.INDEX_METER_RESET: continue if value.instance != instance: continue network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Resetting meters on node %s instance %s....", node_id, instance ) return _LOGGER.info( "Node %s on instance %s does not have resettable meters", node_id, instance ) def heal_node(service): """Heal a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) update_return_routes = service.data.get(const.ATTR_RETURN_ROUTES) node = network.nodes[node_id] _LOGGER.info("Z-Wave node heal running for node %s", node_id) node.heal(update_return_routes) def test_node(service): """Send test messages to a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) messages = service.data.get(const.ATTR_MESSAGES) node = network.nodes[node_id] _LOGGER.info("Sending %s test-messages to node %s", messages, node_id) node.test(messages) def start_zwave(_service_or_event): """Startup Z-Wave network.""" _LOGGER.info("Starting Z-Wave network...") network.start() hass.bus.fire(const.EVENT_NETWORK_START) async def _check_awaked(): """Wait for Z-wave awaked state (or timeout) and finalize start.""" _LOGGER.debug("network state: %d %s", network.state, network.state_str) start_time = dt_util.utcnow() while True: waited = int((dt_util.utcnow() - start_time).total_seconds()) if network.state >= network.STATE_AWAKED: # Need to be in STATE_AWAKED before talking to nodes. _LOGGER.info("Z-Wave ready after %d seconds", waited) break if waited >= const.NETWORK_READY_WAIT_SECS: # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave # network to be ready. 
_LOGGER.warning( "Z-Wave not ready after %d seconds, continuing anyway", waited ) _LOGGER.info( "final network state: %d %s", network.state, network.state_str ) break await asyncio.sleep(1) hass.async_add_job(_finalize_start) hass.add_job(_check_awaked) def _finalize_start(): """Perform final initializations after Z-Wave network is awaked.""" polling_interval = convert(config.get(CONF_POLLING_INTERVAL), int) if polling_interval is not None: network.set_poll_interval(polling_interval, False) poll_interval = network.get_poll_interval() _LOGGER.info("Z-Wave polling interval set to %d ms", poll_interval) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network) # Register node services for Z-Wave network hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node) hass.services.register(DOMAIN, const.SERVICE_ADD_NODE_SECURE, add_node_secure) hass.services.register(DOMAIN, const.SERVICE_REMOVE_NODE, remove_node) hass.services.register(DOMAIN, const.SERVICE_CANCEL_COMMAND, cancel_command) hass.services.register(DOMAIN, const.SERVICE_HEAL_NETWORK, heal_network) hass.services.register(DOMAIN, const.SERVICE_SOFT_RESET, soft_reset) hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK, test_network) hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK, stop_network) hass.services.register( DOMAIN, const.SERVICE_RENAME_NODE, rename_node, schema=RENAME_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RENAME_VALUE, rename_value, schema=RENAME_VALUE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER, set_config_parameter, schema=SET_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_NODE_VALUE, set_node_value, schema=SET_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE_VALUE, refresh_node_value, schema=REFRESH_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_PRINT_CONFIG_PARAMETER, print_config_parameter, schema=PRINT_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REMOVE_FAILED_NODE, remove_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REPLACE_FAILED_NODE, replace_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_CHANGE_ASSOCIATION, change_association, schema=CHANGE_ASSOCIATION_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_WAKEUP, set_wakeup, schema=SET_WAKEUP_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_PRINT_NODE, print_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_ENTITY, async_refresh_entity, schema=REFRESH_ENTITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE, refresh_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RESET_NODE_METERS, reset_node_meters, schema=RESET_NODE_METERS_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_POLL_INTENSITY, set_poll_intensity, schema=SET_POLL_INTENSITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_HEAL_NODE, heal_node, schema=HEAL_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_TEST_NODE, test_node, schema=TEST_NODE_SCHEMA ) # Setup autoheal if autoheal: _LOGGER.info("Z-Wave network autoheal is enabled") async_track_time_change(hass, heal_network, hour=0, minute=0, second=0) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_zwave) hass.services.async_register(DOMAIN, const.SERVICE_START_NETWORK, start_zwave) for entry_component in SUPPORTED_PLATFORMS: 
hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, entry_component) ) return True class ZWaveDeviceEntityValues: """Manages entity access to the underlying zwave value objects.""" def __init__( self, hass, schema, primary_value, zwave_config, device_config, registry ): """Initialize the values object with the passed entity schema.""" self._hass = hass self._zwave_config = zwave_config self._device_config = device_config self._schema = copy.deepcopy(schema) self._values = {} self._entity = None self._workaround_ignore = False self._registry = registry for name in self._schema[const.DISC_VALUES].keys(): self._values[name] = None self._schema[const.DISC_VALUES][name][const.DISC_INSTANCE] = [ primary_value.instance ] self._values[const.DISC_PRIMARY] = primary_value self._node = primary_value.node self._schema[const.DISC_NODE_ID] = [self._node.node_id] # Check values that have already been discovered for node for value in self._node.values.values(): self.check_value(value) self._check_entity_ready() def __getattr__(self, name): """Get the specified value for this entity.""" return self._values[name] def __iter__(self): """Allow iteration over all values.""" return iter(self._values.values()) def check_value(self, value): """Check if the new value matches a missing value for this entity. If a match is found, it is added to the values mapping. """ if not check_node_schema(value.node, self._schema): return for name in self._values: if self._values[name] is not None: continue if not check_value_schema(value, self._schema[const.DISC_VALUES][name]): continue self._values[name] = value if self._entity: self._entity.value_added() self._entity.value_changed() self._check_entity_ready() def _check_entity_ready(self): """Check if all required values are discovered and create entity.""" if self._workaround_ignore: return if self._entity is not None: return for name in self._schema[const.DISC_VALUES]: if self._values[name] is None and not self._schema[const.DISC_VALUES][ name ].get(const.DISC_OPTIONAL): return component = self._schema[const.DISC_COMPONENT] workaround_component = workaround.get_device_component_mapping(self.primary) if workaround_component and workaround_component != component: if workaround_component == workaround.WORKAROUND_IGNORE: _LOGGER.info( "Ignoring Node %d Value %d due to workaround", self.primary.node.node_id, self.primary.value_id, ) # No entity will be created for this value self._workaround_ignore = True return _LOGGER.debug("Using %s instead of %s", workaround_component, component) component = workaround_component entity_id = self._registry.async_get_entity_id( component, DOMAIN, compute_value_unique_id(self._node, self.primary) ) if entity_id is None: value_name = _value_name(self.primary) entity_id = generate_entity_id(component + ".{}", value_name, []) node_config = self._device_config.get(entity_id) # Configure node _LOGGER.debug( "Adding Node_id=%s Generic_command_class=%s, " "Specific_command_class=%s, " "Command_class=%s, Value type=%s, " "Genre=%s as %s", self._node.node_id, self._node.generic, self._node.specific, self.primary.command_class, self.primary.type, self.primary.genre, component, ) if node_config.get(CONF_IGNORED): _LOGGER.info("Ignoring entity %s due to device settings", entity_id) # No entity will be created for this value self._workaround_ignore = True return polling_intensity = convert(node_config.get(CONF_POLLING_INTENSITY), int) if polling_intensity: self.primary.enable_poll(polling_intensity) platform = 
import_module(f".{component}", __name__) device = platform.get_device( node=self._node, values=self, node_config=node_config, hass=self._hass ) if device is None: # No entity will be created for this value self._workaround_ignore = True return self._entity = device @callback def _on_ready(sec): _LOGGER.info( "Z-Wave entity %s (node_id: %d) ready after %d seconds", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave entity %s (node_id: %d) not ready after %d seconds, " "continuing anyway", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) async def discover_device(component, device): """Put device in a dictionary and call discovery on it.""" if self._hass.data[DATA_DEVICES].get(device.unique_id): return self._hass.data[DATA_DEVICES][device.unique_id] = device if component in SUPPORTED_PLATFORMS: async_dispatcher_send(self._hass, f"zwave_new_{component}", device) else: await discovery.async_load_platform( self._hass, component, DOMAIN, {const.DISCOVERY_DEVICE: device.unique_id}, self._zwave_config, ) if device.unique_id: self._hass.add_job(discover_device, component, device) else: self._hass.add_job(check_has_unique_id, device, _on_ready, _on_timeout) class ZWaveDeviceEntity(ZWaveBaseEntity): """Representation of a Z-Wave node entity.""" def __init__(self, values, domain): """Initialize the z-Wave device.""" # pylint: disable=import-error super().__init__() from openzwave.network import ZWaveNetwork from pydispatch import dispatcher self.values = values self.node = values.primary.node self.values.primary.set_change_verified(False) self._name = _value_name(self.values.primary) self._unique_id = self._compute_unique_id() self._update_attributes() dispatcher.connect( self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED ) def network_value_changed(self, value): """Handle a value change on the network.""" if value.value_id in [v.value_id for v in self.values if v]: return self.value_changed() def value_added(self): """Handle a new value of this entity.""" def value_changed(self): """Handle a changed value for this entity's node.""" self._update_attributes() self.update_properties() self.maybe_schedule_update() async def value_renamed(self, update_ids=False): """Rename the node and update any IDs.""" self._name = _value_name(self.values.primary) if update_ids: # Update entity ID. ent_reg = await async_get_entity_registry(self.hass) new_entity_id = ent_reg.async_generate_entity_id( self.platform.domain, self._name, self.platform.entities.keys() - {self.entity_id}, ) if new_entity_id != self.entity_id: # Don't change the name attribute, it will be None unless # customised and if it's been customised, keep the # customisation. ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id) return # else for the above two ifs, update if not using update_entity self.async_write_ha_state() async def async_added_to_hass(self): """Add device to dict.""" async_dispatcher_connect( self.hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id), self.refresh_from_network, ) def _update_attributes(self): """Update the node attributes. 
May only be used inside callback.""" self.node_id = self.node.node_id self._name = _value_name(self.values.primary) if not self._unique_id: self._unique_id = self._compute_unique_id() if self._unique_id: self.try_remove_and_add() if self.values.power: self.power_consumption = round( self.values.power.data, self.values.power.precision ) else: self.power_consumption = None def update_properties(self): """Update on data changes for node values.""" @property def should_poll(self): """No polling needed.""" return False @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_info(self): """Return device information.""" identifier, name = node_device_id_and_name( self.node, self.values.primary.instance ) info = { "name": name, "identifiers": {identifier}, "manufacturer": self.node.manufacturer_name, "model": self.node.product_name, } if self.values.primary.instance > 1: info["via_device"] = (DOMAIN, self.node_id) elif self.node_id > 1: info["via_device"] = (DOMAIN, 1) return info @property def name(self): """Return the name of the device.""" return self._name @property def device_state_attributes(self): """Return the device specific state attributes.""" attrs = { const.ATTR_NODE_ID: self.node_id, const.ATTR_VALUE_INDEX: self.values.primary.index, const.ATTR_VALUE_INSTANCE: self.values.primary.instance, const.ATTR_VALUE_ID: str(self.values.primary.value_id), } if self.power_consumption is not None: attrs[ATTR_POWER] = self.power_consumption return attrs def refresh_from_network(self): """Refresh all dependent values from zwave network.""" for value in self.values: if value is not None: self.node.refresh_value(value.value_id) def _compute_unique_id(self): if ( is_node_parsed(self.node) and self.values.primary.label != "Unknown" ) or self.node.is_ready: return compute_value_unique_id(self.node, self.values.primary) return None def compute_value_unique_id(node, value): """Compute unique_id a value would get if it were to get one.""" return f"{node.node_id}-{value.object_id}"
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
repo_name: pschmitt/home-assistant
test_path: tests/components/demo/test_vacuum.py
code_path: homeassistant/components/zwave/__init__.py
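The Z-Wave component above validates every service payload with voluptuous schemas. Below is a small self-contained sketch, assuming only the voluptuous package, of what those schemas do: vol.Coerce(int) turns string input (as service data often arrives from YAML) into integers, and vol.Optional fills in defaults. The plain-string keys stand in for the const.ATTR_* names used by the real RENAME_NODE_SCHEMA.

# Standalone sketch mirroring RENAME_NODE_SCHEMA; keys are plain strings here
# instead of the const.ATTR_* constants used in the component above.
import voluptuous as vol

RENAME_NODE_SKETCH = vol.Schema(
    {
        vol.Required("node_id"): vol.Coerce(int),
        vol.Required("name"): str,
        vol.Optional("update_ids", default=False): bool,
    }
)

# Strings are coerced to int and missing optional keys get their defaults.
validated = RENAME_NODE_SKETCH({"node_id": "12", "name": "Kitchen light"})
assert validated == {"node_id": 12, "name": "Kitchen light", "update_ids": False}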
"""The template component.""" from itertools import chain import logging from homeassistant.const import MATCH_ALL _LOGGER = logging.getLogger(__name__) def initialise_templates(hass, templates, attribute_templates=None): """Initialise templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} for template in chain(templates.values(), attribute_templates.values()): if template is None: continue template.hass = hass def extract_entities( device_name, device_type, manual_entity_ids, templates, attribute_templates=None ): """Extract entity ids from templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} entity_ids = set() if manual_entity_ids is None: invalid_templates = [] for template_name, template in chain( templates.items(), attribute_templates.items() ): if template is None: continue template_entity_ids = template.extract_entities() if template_entity_ids != MATCH_ALL: entity_ids |= set(template_entity_ids) else: invalid_templates.append(template_name.replace("_template", "")) entity_ids = list(entity_ids) if invalid_templates: if not entity_ids: entity_ids = MATCH_ALL _LOGGER.warning( "Template %s '%s' has no entity ids configured to track nor" " were we able to extract the entities to track from the %s " "template(s). This entity will only be able to be updated " "manually", device_type, device_name, ", ".join(invalid_templates), ) else: entity_ids = manual_entity_ids return entity_ids
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/template/__init__.py
"""Support for Niko Home Control.""" from datetime import timedelta import logging import nikohomecontrol import voluptuous as vol # Import the device class from the component that you want to support from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Niko Home Control light platform.""" host = config[CONF_HOST] try: nhc = nikohomecontrol.NikoHomeControl( {"ip": host, "port": 8000, "timeout": 20000} ) niko_data = NikoHomeControlData(hass, nhc) await niko_data.async_update() except OSError as err: _LOGGER.error("Unable to access %s (%s)", host, err) raise PlatformNotReady async_add_entities( [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True ) class NikoHomeControlLight(LightEntity): """Representation of an Niko Light.""" def __init__(self, light, data): """Set up the Niko Home Control light platform.""" self._data = data self._light = light self._unique_id = f"light-{light.id}" self._name = light.name self._state = light.is_on self._brightness = None @property def unique_id(self): """Return unique ID for light.""" return self._unique_id @property def name(self): """Return the display name of this light.""" return self._name @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def is_on(self): """Return true if light is on.""" return self._state def turn_on(self, **kwargs): """Instruct the light to turn on.""" self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255) _LOGGER.debug("Turn on: %s", self.name) self._light.turn_on() def turn_off(self, **kwargs): """Instruct the light to turn off.""" _LOGGER.debug("Turn off: %s", self.name) self._light.turn_off() async def async_update(self): """Get the latest data from NikoHomeControl API.""" await self._data.async_update() self._state = self._data.get_state(self._light.id) class NikoHomeControlData: """The class for handling data retrieval.""" def __init__(self, hass, nhc): """Set up Niko Home Control Data object.""" self._nhc = nhc self.hass = hass self.available = True self.data = {} self._system_info = None @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the NikoHomeControl API.""" _LOGGER.debug("Fetching async state in bulk") try: self.data = await self.hass.async_add_executor_job( self._nhc.list_actions_raw ) self.available = True except OSError as ex: _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) self.available = False def get_state(self, aid): """Find and filter state based on action id.""" for state in self.data: if state["id"] == aid: return state["value1"] != 0 _LOGGER.error("Failed to retrieve state off unknown light")
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/niko_home_control/light.py
"""Component that will help set the Microsoft face for verify processing.""" import logging import voluptuous as vol from homeassistant.components.image_processing import ( ATTR_CONFIDENCE, CONF_CONFIDENCE, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingFaceEntity, ) from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE from homeassistant.const import ATTR_NAME from homeassistant.core import split_entity_id from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_GROUP = "group" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_GROUP): cv.slugify}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Microsoft Face identify platform.""" api = hass.data[DATA_MICROSOFT_FACE] face_group = config[CONF_GROUP] confidence = config[CONF_CONFIDENCE] entities = [] for camera in config[CONF_SOURCE]: entities.append( MicrosoftFaceIdentifyEntity( camera[CONF_ENTITY_ID], api, face_group, confidence, camera.get(CONF_NAME), ) ) async_add_entities(entities) class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity): """Representation of the Microsoft Face API entity for identify.""" def __init__(self, camera_entity, api, face_group, confidence, name=None): """Initialize the Microsoft Face API.""" super().__init__() self._api = api self._camera = camera_entity self._confidence = confidence self._face_group = face_group if name: self._name = name else: self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}" @property def confidence(self): """Return minimum confidence for send events.""" return self._confidence @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def name(self): """Return the name of the entity.""" return self._name async def async_process_image(self, image): """Process image. This method is a coroutine. """ detect = [] try: face_data = await self._api.call_api("post", "detect", image, binary=True) if face_data: face_ids = [data["faceId"] for data in face_data] detect = await self._api.call_api( "post", "identify", {"faceIds": face_ids, "personGroupId": self._face_group}, ) except HomeAssistantError as err: _LOGGER.error("Can't process image on Microsoft face: %s", err) return # Parse data known_faces = [] total = 0 for face in detect: total += 1 if not face["candidates"]: continue data = face["candidates"][0] name = "" for s_name, s_id in self._api.store[self._face_group].items(): if data["personId"] == s_id: name = s_name break known_faces.append( {ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100} ) self.async_process_faces(known_faces, total)
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/microsoft_face_identify/image_processing.py
"""Support for Huawei LTE sensors.""" import logging import re from typing import Optional import attr from homeassistant.components.sensor import ( DEVICE_CLASS_SIGNAL_STRENGTH, DOMAIN as SENSOR_DOMAIN, ) from homeassistant.const import CONF_URL, DATA_BYTES, STATE_UNKNOWN, TIME_SECONDS from . import HuaweiLteBaseEntity from .const import ( DOMAIN, KEY_DEVICE_INFORMATION, KEY_DEVICE_SIGNAL, KEY_MONITORING_MONTH_STATISTICS, KEY_MONITORING_STATUS, KEY_MONITORING_TRAFFIC_STATISTICS, KEY_NET_CURRENT_PLMN, KEY_NET_NET_MODE, KEY_SMS_SMS_COUNT, SENSOR_KEYS, ) _LOGGER = logging.getLogger(__name__) SENSOR_META = { KEY_DEVICE_INFORMATION: dict( include=re.compile(r"^WanIP.*Address$", re.IGNORECASE) ), (KEY_DEVICE_INFORMATION, "WanIPAddress"): dict( name="WAN IP address", icon="mdi:ip", enabled_default=True ), (KEY_DEVICE_INFORMATION, "WanIPv6Address"): dict( name="WAN IPv6 address", icon="mdi:ip" ), (KEY_DEVICE_SIGNAL, "band"): dict(name="Band"), (KEY_DEVICE_SIGNAL, "cell_id"): dict(name="Cell ID"), (KEY_DEVICE_SIGNAL, "lac"): dict(name="LAC", icon="mdi:map-marker"), (KEY_DEVICE_SIGNAL, "mode"): dict( name="Mode", formatter=lambda x: ({"0": "2G", "2": "3G", "7": "4G"}.get(x, "Unknown"), None), ), (KEY_DEVICE_SIGNAL, "pci"): dict(name="PCI"), (KEY_DEVICE_SIGNAL, "rsrq"): dict( name="RSRQ", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrq.php icon=lambda x: (x is None or x < -11) and "mdi:signal-cellular-outline" or x < -8 and "mdi:signal-cellular-1" or x < -5 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rsrp"): dict( name="RSRP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrp.php icon=lambda x: (x is None or x < -110) and "mdi:signal-cellular-outline" or x < -95 and "mdi:signal-cellular-1" or x < -80 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rssi"): dict( name="RSSI", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://eyesaas.com/wi-fi-signal-strength/ icon=lambda x: (x is None or x < -80) and "mdi:signal-cellular-outline" or x < -70 and "mdi:signal-cellular-1" or x < -60 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "sinr"): dict( name="SINR", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/sinr.php icon=lambda x: (x is None or x < 0) and "mdi:signal-cellular-outline" or x < 5 and "mdi:signal-cellular-1" or x < 10 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rscp"): dict( name="RSCP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/RSCP icon=lambda x: (x is None or x < -95) and "mdi:signal-cellular-outline" or x < -85 and "mdi:signal-cellular-1" or x < -75 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), (KEY_DEVICE_SIGNAL, "ecio"): dict( name="EC/IO", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/EC/IO icon=lambda x: (x is None or x < -20) and "mdi:signal-cellular-outline" or x < -10 and "mdi:signal-cellular-1" or x < -6 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), KEY_MONITORING_MONTH_STATISTICS: dict( exclude=re.compile(r"^month(duration|lastcleartime)$", re.IGNORECASE) ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthDownload"): dict( name="Current month download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthUpload"): dict( 
name="Current month upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_MONITORING_STATUS: dict( include=re.compile( r"^(currentwifiuser|(primary|secondary).*dns)$", re.IGNORECASE ) ), (KEY_MONITORING_STATUS, "CurrentWifiUser"): dict( name="WiFi clients connected", icon="mdi:wifi" ), (KEY_MONITORING_STATUS, "PrimaryDns"): dict( name="Primary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryDns"): dict( name="Secondary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "PrimaryIPv6Dns"): dict( name="Primary IPv6 DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryIPv6Dns"): dict( name="Secondary IPv6 DNS server", icon="mdi:ip" ), KEY_MONITORING_TRAFFIC_STATISTICS: dict( exclude=re.compile(r"^showtraffic$", re.IGNORECASE) ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentConnectTime"): dict( name="Current connection duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownload"): dict( name="Current connection download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUpload"): dict( name="Current connection upload", unit=DATA_BYTES, icon="mdi:upload" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalConnectTime"): dict( name="Total connected duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalDownload"): dict( name="Total download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalUpload"): dict( name="Total upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_NET_CURRENT_PLMN: dict(exclude=re.compile(r"^(Rat|ShortName)$", re.IGNORECASE)), (KEY_NET_CURRENT_PLMN, "State"): dict( name="Operator search mode", formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None), ), (KEY_NET_CURRENT_PLMN, "FullName"): dict(name="Operator name",), (KEY_NET_CURRENT_PLMN, "Numeric"): dict(name="Operator code",), KEY_NET_NET_MODE: dict(include=re.compile(r"^NetworkMode$", re.IGNORECASE)), (KEY_NET_NET_MODE, "NetworkMode"): dict( name="Preferred mode", formatter=lambda x: ( { "00": "4G/3G/2G", "01": "2G", "02": "3G", "03": "4G", "0301": "4G/2G", "0302": "4G/3G", "0201": "3G/2G", }.get(x, "Unknown"), None, ), ), (KEY_SMS_SMS_COUNT, "LocalUnread"): dict( name="SMS unread", icon="mdi:email-receive", ), } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up from config entry.""" router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]] sensors = [] for key in SENSOR_KEYS: items = router.data.get(key) if not items: continue key_meta = SENSOR_META.get(key) if key_meta: include = key_meta.get("include") if include: items = filter(include.search, items) exclude = key_meta.get("exclude") if exclude: items = [x for x in items if not exclude.search(x)] for item in items: sensors.append( HuaweiLteSensor(router, key, item, SENSOR_META.get((key, item), {})) ) async_add_entities(sensors, True) def format_default(value): """Format value.""" unit = None if value is not None: # Clean up value and infer unit, e.g. 
-71dBm, 15 dB match = re.match( r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$", str(value) ) if match: try: value = float(match.group("value")) unit = match.group("unit") except ValueError: pass return value, unit @attr.s class HuaweiLteSensor(HuaweiLteBaseEntity): """Huawei LTE sensor entity.""" key: str = attr.ib() item: str = attr.ib() meta: dict = attr.ib() _state = attr.ib(init=False, default=STATE_UNKNOWN) _unit: str = attr.ib(init=False) async def async_added_to_hass(self): """Subscribe to needed data on add.""" await super().async_added_to_hass() self.router.subscriptions[self.key].add(f"{SENSOR_DOMAIN}/{self.item}") async def async_will_remove_from_hass(self): """Unsubscribe from needed data on remove.""" await super().async_will_remove_from_hass() self.router.subscriptions[self.key].remove(f"{SENSOR_DOMAIN}/{self.item}") @property def _entity_name(self) -> str: return self.meta.get("name", self.item) @property def _device_unique_id(self) -> str: return f"{self.key}.{self.item}" @property def state(self): """Return sensor state.""" return self._state @property def device_class(self) -> Optional[str]: """Return sensor device class.""" return self.meta.get("device_class") @property def unit_of_measurement(self): """Return sensor's unit of measurement.""" return self.meta.get("unit", self._unit) @property def icon(self): """Return icon for sensor.""" icon = self.meta.get("icon") if callable(icon): return icon(self.state) return icon @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return bool(self.meta.get("enabled_default")) async def async_update(self): """Update state.""" try: value = self.router.data[self.key][self.item] except KeyError: _LOGGER.debug("%s[%s] not in data", self.key, self.item) self._available = False return self._available = True formatter = self.meta.get("formatter") if not callable(formatter): formatter = format_default self._state, self._unit = formatter(value) async def async_setup_platform(*args, **kwargs): """Old no longer used way to set up Huawei LTE sensors.""" _LOGGER.warning( "Loading and configuring as a platform is no longer supported or " "required, convert to enabling/disabling available entities" )
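The format_default helper above splits raw readings such as "-71dBm" or "15 dB" into a numeric value and a unit. The snippet below exercises the same regular expression on a few invented sample strings; it is only an illustration of the pattern, not part of the integration.

import re

# Same pattern as format_default() above; the sample inputs are invented.
PATTERN = re.compile(r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$")

for raw in ("-71dBm", "15 dB", ">=-110dBm", "169.254.1.1"):
    match = PATTERN.match(str(raw))
    value, unit = raw, None
    if match:
        try:
            value, unit = float(match.group("value")), match.group("unit")
        except ValueError:
            pass
    print(raw, "->", value, unit)
# -71dBm -> -71.0 dBm
# 15 dB -> 15.0 dB
# >=-110dBm -> -110.0 dBm
# 169.254.1.1 -> 169.254.1.1 None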
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS


async def test_methods(hass):
    """Test if methods call the services as expected."""
    hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON)
    await hass.async_block_till_done()
    assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC)

    hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF)
    await hass.async_block_till_done()
    assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC)

    await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_stop(hass, ENTITY_VACUUM_COMPLETE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    state = hass.states.get(ENTITY_VACUUM_COMPLETE)
    assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100
    assert state.attributes.get(ATTR_STATUS) != "Charging"

    await common.async_locate(hass, ENTITY_VACUUM_COMPLETE)
    state = hass.states.get(ENTITY_VACUUM_COMPLETE)
    assert "I'm over here" in state.attributes.get(ATTR_STATUS)

    await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE)
    state = hass.states.get(ENTITY_VACUUM_COMPLETE)
    assert "Returning home" in state.attributes.get(ATTR_STATUS)

    await common.async_set_fan_speed(
        hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE
    )
    state = hass.states.get(ENTITY_VACUUM_COMPLETE)
    assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1]

    await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE)
    state = hass.states.get(ENTITY_VACUUM_COMPLETE)
    assert "spot" in state.attributes.get(ATTR_STATUS)
    assert state.state == STATE_ON

    await common.async_start(hass, ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state == STATE_CLEANING

    await common.async_pause(hass, ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state == STATE_PAUSED

    await common.async_stop(hass, ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state == STATE_IDLE

    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100
    assert state.state != STATE_DOCKED

    await common.async_return_to_base(hass, ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state == STATE_RETURNING

    await common.async_set_fan_speed(
        hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE
    )
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1]

    await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state == STATE_CLEANING


async def test_unsupported_methods(hass):
    """Test service calls for unsupported vacuums."""
    hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON)
    await hass.async_block_till_done()
    assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    await common.async_turn_off(hass, ENTITY_VACUUM_NONE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    await common.async_stop(hass, ENTITY_VACUUM_NONE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF)
    await hass.async_block_till_done()
    assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    await common.async_turn_on(hass, ENTITY_VACUUM_NONE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    await common.async_toggle(hass, ENTITY_VACUUM_NONE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    # Non supported methods:
    await common.async_start_pause(hass, ENTITY_VACUUM_NONE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)

    await common.async_locate(hass, ENTITY_VACUUM_NONE)
    state = hass.states.get(ENTITY_VACUUM_NONE)
    assert state.attributes.get(ATTR_STATUS) is None

    await common.async_return_to_base(hass, ENTITY_VACUUM_NONE)
    state = hass.states.get(ENTITY_VACUUM_NONE)
    assert state.attributes.get(ATTR_STATUS) is None

    await common.async_set_fan_speed(
        hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE
    )
    state = hass.states.get(ENTITY_VACUUM_NONE)
    assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1]

    await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC)
    state = hass.states.get(ENTITY_VACUUM_BASIC)
    assert "spot" not in state.attributes.get(ATTR_STATUS)
    assert state.state == STATE_OFF

    # VacuumEntity should not support start and pause methods.
    hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON)
    await hass.async_block_till_done()
    assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_pause(hass, ENTITY_VACUUM_COMPLETE)
    assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF)
    await hass.async_block_till_done()
    assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    await common.async_start(hass, ENTITY_VACUUM_COMPLETE)
    assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)

    # StateVacuumEntity does not support on/off
    await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state != STATE_CLEANING

    await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state != STATE_RETURNING

    await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE)
    state = hass.states.get(ENTITY_VACUUM_STATE)
    assert state.state != STATE_CLEANING


async def test_services(hass):
    """Test vacuum services."""
    # Test send_command
    send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND)

    params = {"rotate": 150, "speed": 20}
    await common.async_send_command(
        hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params
    )
    assert len(send_command_calls) == 1
    call = send_command_calls[-1]

    assert call.domain == DOMAIN
    assert call.service == SERVICE_SEND_COMMAND
    assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC
    assert call.data[ATTR_COMMAND] == "test_command"
    assert call.data[ATTR_PARAMS] == params

    # Test set fan speed
    set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED)

    await common.async_set_fan_speed(
        hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE
    )
    assert len(set_fan_speed_calls) == 1
    call = set_fan_speed_calls[-1]

    assert call.domain == DOMAIN
    assert call.service == SERVICE_SET_FAN_SPEED
    assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE
    assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0]


async def test_set_fan_speed(hass):
    """Test vacuum service to set the fan speed."""
    group_vacuums = ",".join(
        [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE]
    )
    old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
    old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
    old_state_state = hass.states.get(ENTITY_VACUUM_STATE)

    await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums)

    new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
    new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
    new_state_state = hass.states.get(ENTITY_VACUUM_STATE)

    assert old_state_basic == new_state_basic
    assert ATTR_FAN_SPEED not in new_state_basic.attributes

    assert old_state_complete != new_state_complete
    assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1]
    assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0]

    assert old_state_state != new_state_state
    assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1]
    assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0]


async def test_send_command(hass):
    """Test vacuum service to send a command."""
    group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE])
    old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
    old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)

    await common.async_send_command(
        hass, "test_command", params={"p1": 3}, entity_id=group_vacuums
    )

    new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
    new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)

    assert old_state_basic == new_state_basic
    assert old_state_complete != new_state_complete
    assert new_state_complete.state == STATE_ON
    assert (
        new_state_complete.attributes[ATTR_STATUS]
        == "Executing test_command({'p1': 3})"
    )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/huawei_lte/sensor.py
"""Support for Yamaha MusicCast Receivers.""" import logging import socket import pymusiccast import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_PORT, STATE_IDLE, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = ( SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_SELECT_SOURCE ) KNOWN_HOSTS_KEY = "data_yamaha_musiccast" INTERVAL_SECONDS = "interval_seconds" DEFAULT_PORT = 5005 DEFAULT_INTERVAL = 480 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(INTERVAL_SECONDS, default=DEFAULT_INTERVAL): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Yamaha MusicCast platform.""" known_hosts = hass.data.get(KNOWN_HOSTS_KEY) if known_hosts is None: known_hosts = hass.data[KNOWN_HOSTS_KEY] = [] _LOGGER.debug("known_hosts: %s", known_hosts) host = config.get(CONF_HOST) port = config.get(CONF_PORT) interval = config.get(INTERVAL_SECONDS) # Get IP of host to prevent duplicates try: ipaddr = socket.gethostbyname(host) except (OSError) as error: _LOGGER.error("Could not communicate with %s:%d: %s", host, port, error) return if [item for item in known_hosts if item[0] == ipaddr]: _LOGGER.warning("Host %s:%d already registered", host, port) return if [item for item in known_hosts if item[1] == port]: _LOGGER.warning("Port %s:%d already registered", host, port) return reg_host = (ipaddr, port) known_hosts.append(reg_host) try: receiver = pymusiccast.McDevice(ipaddr, udp_port=port, mc_interval=interval) except pymusiccast.exceptions.YMCInitError as err: _LOGGER.error(err) receiver = None if receiver: for zone in receiver.zones: _LOGGER.debug("Receiver: %s / Port: %d / Zone: %s", receiver, port, zone) add_entities([YamahaDevice(receiver, receiver.zones[zone])], True) else: known_hosts.remove(reg_host) class YamahaDevice(MediaPlayerEntity): """Representation of a Yamaha MusicCast device.""" def __init__(self, recv, zone): """Initialize the Yamaha MusicCast device.""" self._recv = recv self._name = recv.name self._source = None self._source_list = [] self._zone = zone self.mute = False self.media_status = None self.media_status_received = None self.power = STATE_UNKNOWN self.status = STATE_UNKNOWN self.volume = 0 self.volume_max = 0 self._recv.set_yamaha_device(self) self._zone.set_yamaha_device(self) @property def name(self): """Return the name of the device.""" return f"{self._name} ({self._zone.zone_id})" @property def state(self): """Return the state of the device.""" if self.power == STATE_ON and self.status != STATE_UNKNOWN: return self.status return self.power @property def should_poll(self): """Push an update after each command.""" return True @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self.mute @property def volume_level(self): """Volume level of the media 
player (0..1).""" return self.volume @property def supported_features(self): """Flag of features that are supported.""" return SUPPORTED_FEATURES @property def source(self): """Return the current input source.""" return self._source @property def source_list(self): """List of available input sources.""" return self._source_list @source_list.setter def source_list(self, value): """Set source_list attribute.""" self._source_list = value @property def media_content_type(self): """Return the media content type.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.media_status.media_duration if self.media_status else None @property def media_image_url(self): """Image url of current playing media.""" return self.media_status.media_image_url if self.media_status else None @property def media_artist(self): """Artist of current playing media, music track only.""" return self.media_status.media_artist if self.media_status else None @property def media_album(self): """Album of current playing media, music track only.""" return self.media_status.media_album if self.media_status else None @property def media_track(self): """Track number of current playing media, music track only.""" return self.media_status.media_track if self.media_status else None @property def media_title(self): """Title of current playing media.""" return self.media_status.media_title if self.media_status else None @property def media_position(self): """Position of current playing media in seconds.""" if self.media_status and self.state in [ STATE_PLAYING, STATE_PAUSED, STATE_IDLE, ]: return self.media_status.media_position @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return self.media_status_received if self.media_status else None def update(self): """Get the latest details from the device.""" _LOGGER.debug("update: %s", self.entity_id) self._recv.update_status() self._zone.update_status() def update_hass(self): """Push updates to Home Assistant.""" if self.entity_id: _LOGGER.debug("update_hass: pushing updates") self.schedule_update_ha_state() return True def turn_on(self): """Turn on specified media player or all.""" _LOGGER.debug("Turn device: on") self._zone.set_power(True) def turn_off(self): """Turn off specified media player or all.""" _LOGGER.debug("Turn device: off") self._zone.set_power(False) def media_play(self): """Send the media player the command for play/pause.""" _LOGGER.debug("Play") self._recv.set_playback("play") def media_pause(self): """Send the media player the command for pause.""" _LOGGER.debug("Pause") self._recv.set_playback("pause") def media_stop(self): """Send the media player the stop command.""" _LOGGER.debug("Stop") self._recv.set_playback("stop") def media_previous_track(self): """Send the media player the command for prev track.""" _LOGGER.debug("Previous") self._recv.set_playback("previous") def media_next_track(self): """Send the media player the command for next track.""" _LOGGER.debug("Next") self._recv.set_playback("next") def mute_volume(self, mute): """Send mute command.""" _LOGGER.debug("Mute volume: %s", mute) self._zone.set_mute(mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" _LOGGER.debug("Volume level: %.2f / %d", volume, volume * self.volume_max) self._zone.set_volume(volume * self.volume_max) def select_source(self, source): """Send the media player the command to select input source.""" _LOGGER.debug("select_source: %s", source) self.status = STATE_UNKNOWN self._zone.set_input(source) def new_media_status(self, status): """Handle updates of the media status.""" _LOGGER.debug("new media_status arrived") self.media_status = status self.media_status_received = dt_util.utcnow()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/yamaha_musiccast/media_player.py
"""Support for Ubee router.""" import logging from pyubee import Ubee import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_MODEL = "model" DEFAULT_MODEL = "detect" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_MODEL, default=DEFAULT_MODEL): vol.Any( "EVW32C-0N", "EVW320B", "EVW321B", "EVW3200-Wifi", "EVW3226@UPC", "DVW32CB", "DDW36C", ), } ) def get_scanner(hass, config): """Validate the configuration and return a Ubee scanner.""" info = config[DOMAIN] host = info[CONF_HOST] username = info[CONF_USERNAME] password = info[CONF_PASSWORD] model = info[CONF_MODEL] ubee = Ubee(host, username, password, model) if not ubee.login(): _LOGGER.error("Login failed") return None scanner = UbeeDeviceScanner(ubee) return scanner class UbeeDeviceScanner(DeviceScanner): """This class queries a wireless Ubee router.""" def __init__(self, ubee): """Initialize the Ubee scanner.""" self._ubee = ubee self._mac2name = {} def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" devices = self._get_connected_devices() self._mac2name = devices return list(devices) def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self._mac2name.get(device) def _get_connected_devices(self): """List connected devices with pyubee.""" if not self._ubee.session_active(): self._ubee.login() return self._ubee.get_connected_devices()
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/ubee/device_tracker.py
"""Component to interface with switches that can be controlled remotely.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, ) from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.loader import bind_hass # mypy: allow-untyped-defs, no-check-untyped-defs DOMAIN = "switch" SCAN_INTERVAL = timedelta(seconds=30) ENTITY_ID_FORMAT = DOMAIN + ".{}" ATTR_TODAY_ENERGY_KWH = "today_energy_kwh" ATTR_CURRENT_POWER_W = "current_power_w" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) PROP_TO_ATTR = { "current_power_w": ATTR_CURRENT_POWER_W, "today_energy_kwh": ATTR_TODAY_ENERGY_KWH, } DEVICE_CLASS_OUTLET = "outlet" DEVICE_CLASS_SWITCH = "switch" DEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) _LOGGER = logging.getLogger(__name__) @bind_hass def is_on(hass, entity_id): """Return if the switch is on based on the statemachine. Async friendly. """ return hass.states.is_state(entity_id, STATE_ON) async def async_setup(hass, config): """Track states and offer events for switches.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class SwitchEntity(ToggleEntity): """Representation of a switch.""" @property def current_power_w(self): """Return the current power usage in W.""" return None @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" return None @property def is_standby(self): """Return true if device is in standby.""" return None @property def state_attributes(self): """Return the optional state attributes.""" data = {} for prop, attr in PROP_TO_ATTR.items(): value = getattr(self, prop) if value is not None: data[attr] = value return data @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return None class SwitchDevice(SwitchEntity): """Representation of a switch (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "SwitchDevice is deprecated, modify %s to extend SwitchEntity", cls.__name__, )
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/switch/__init__.py
"""Support for Lutron Caseta shades.""" import logging from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, CoverEntity, ) from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Lutron Caseta cover platform. Adds shades from the Caseta bridge associated with the config_entry as cover entities. """ entities = [] bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id] cover_devices = bridge.get_devices_by_domain(DOMAIN) for cover_device in cover_devices: entity = LutronCasetaCover(cover_device, bridge) entities.append(entity) async_add_entities(entities, True) class LutronCasetaCover(LutronCasetaDevice, CoverEntity): """Representation of a Lutron shade.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION @property def is_closed(self): """Return if the cover is closed.""" return self._device["current_state"] < 1 @property def current_cover_position(self): """Return the current position of cover.""" return self._device["current_state"] async def async_close_cover(self, **kwargs): """Close the cover.""" self._smartbridge.set_value(self.device_id, 0) async def async_open_cover(self, **kwargs): """Open the cover.""" self._smartbridge.set_value(self.device_id, 100) async def async_set_cover_position(self, **kwargs): """Move the shade to a specific position.""" if ATTR_POSITION in kwargs: position = kwargs[ATTR_POSITION] self._smartbridge.set_value(self.device_id, position) async def async_update(self): """Call when forcing a refresh of the device.""" self._device = self._smartbridge.get_device_by_id(self.device_id) _LOGGER.debug(self._device)
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
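The supported_features values asserted in test_supported_features above (2047, 219, 195, 3, 0 and 13436) are bitwise ORs of the vacuum SUPPORT_* flags. Below is a minimal standalone sketch of decoding them; the flag values are assumed to match homeassistant.components.vacuum's SUPPORT_* constants at the time of these tests and are inlined so the snippet runs without Home Assistant installed.

# Minimal sketch: decode the supported_features bitmasks asserted above.
# Assumption: these flag values mirror homeassistant.components.vacuum.SUPPORT_*.
VACUUM_FLAGS = {
    "TURN_ON": 1, "TURN_OFF": 2, "PAUSE": 4, "STOP": 8,
    "RETURN_HOME": 16, "FAN_SPEED": 32, "BATTERY": 64, "STATUS": 128,
    "SEND_COMMAND": 256, "LOCATE": 512, "CLEAN_SPOT": 1024,
    "MAP": 2048, "STATE": 4096, "START": 8192,
}

def decode(bitmask):
    """Return the names of the SUPPORT_* flags set in bitmask."""
    return [name for name, bit in VACUUM_FLAGS.items() if bitmask & bit]

print(decode(3))      # ['TURN_ON', 'TURN_OFF'] -> the minimal demo vacuum
print(decode(2047))   # every flag up to CLEAN_SPOT -> the complete demo vacuum
print(decode(13436))  # ['PAUSE', 'STOP', 'RETURN_HOME', 'FAN_SPEED', 'BATTERY',
                      #  'CLEAN_SPOT', 'STATE', 'START'] -> the StateVacuumEntity demo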
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/lutron_caseta/cover.py
"""Support for (EMEA/EU-based) Honeywell TCC climate systems. Such systems include evohome, Round Thermostat, and others. """ from datetime import datetime as dt, timedelta import logging import re from typing import Any, Dict, Optional, Tuple import aiohttp.client_exceptions import evohomeasync import evohomeasync2 import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, HTTP_SERVICE_UNAVAILABLE, HTTP_TOO_MANY_REQUESTS, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from .const import DOMAIN, EVO_FOLLOW, GWS, STORAGE_KEY, STORAGE_VER, TCS, UTC_OFFSET _LOGGER = logging.getLogger(__name__) ACCESS_TOKEN = "access_token" ACCESS_TOKEN_EXPIRES = "access_token_expires" REFRESH_TOKEN = "refresh_token" USER_DATA = "user_data" CONF_LOCATION_IDX = "location_idx" SCAN_INTERVAL_DEFAULT = timedelta(seconds=300) SCAN_INTERVAL_MINIMUM = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_LOCATION_IDX, default=0): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=SCAN_INTERVAL_DEFAULT ): vol.All(cv.time_period, vol.Range(min=SCAN_INTERVAL_MINIMUM)), } ) }, extra=vol.ALLOW_EXTRA, ) ATTR_SYSTEM_MODE = "mode" ATTR_DURATION_DAYS = "period" ATTR_DURATION_HOURS = "duration" ATTR_ZONE_TEMP = "setpoint" ATTR_DURATION_UNTIL = "duration" SVC_REFRESH_SYSTEM = "refresh_system" SVC_SET_SYSTEM_MODE = "set_system_mode" SVC_RESET_SYSTEM = "reset_system" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SVC_RESET_ZONE_OVERRIDE = "clear_zone_override" RESET_ZONE_OVERRIDE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_TEMP): vol.All( vol.Coerce(float), vol.Range(min=4.0, max=35.0) ), vol.Optional(ATTR_DURATION_UNTIL): vol.All( cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1)) ), } ) # system mode schemas are built dynamically, below def _dt_local_to_aware(dt_naive: dt) -> dt: dt_aware = dt_util.now() + (dt_naive - dt.now()) if dt_aware.microsecond >= 500000: dt_aware += timedelta(seconds=1) return dt_aware.replace(microsecond=0) def _dt_aware_to_naive(dt_aware: dt) -> dt: dt_naive = dt.now() + (dt_aware - dt_util.now()) if dt_naive.microsecond >= 500000: dt_naive += timedelta(seconds=1) return dt_naive.replace(microsecond=0) def convert_until(status_dict: dict, until_key: str) -> None: """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat.""" if until_key in status_dict: # only present for certain modes dt_utc_naive = dt_util.parse_datetime(status_dict[until_key]) status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat() def convert_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: """Recursively convert a dict's keys to snake_case.""" def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) 
return (string[0]).lower() + re.sub( r"[A-Z]", lambda matched: f"_{matched.group(0).lower()}", string[1:] ) return { (convert_key(k) if isinstance(k, str) else k): ( convert_dict(v) if isinstance(v, dict) else v ) for k, v in dictionary.items() } def _handle_exception(err) -> bool: """Return False if the exception can't be ignored.""" try: raise err except evohomeasync2.AuthenticationError: _LOGGER.error( "Failed to authenticate with the vendor's server. " "Check your network and the vendor's service status page. " "Also check that your username and password are correct. " "Message is: %s", err, ) return False except aiohttp.ClientConnectionError: # this appears to be a common occurrence with the vendor's servers _LOGGER.warning( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s", err, ) return False except aiohttp.ClientResponseError: if err.status == HTTP_SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page" ) return False if err.status == HTTP_TOO_MANY_REQUESTS: _LOGGER.warning( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s", CONF_SCAN_INTERVAL, ) return False raise # we don't expect/handle any other Exceptions async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES]) ) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VER, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. 
Fix any configuration errors and restart HA", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False if _LOGGER.isEnabledFor(logging.DEBUG): _config = {"locationInfo": {"timeZone": None}, GWS: [{TCS: None}]} _config["locationInfo"]["timeZone"] = loc_config["locationInfo"]["timeZone"] _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker( hass, client_v2, client_v1, store, config[DOMAIN] ) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config) ) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL] ) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. """ @verify_domain_control(hass, DOMAIN) async def force_refresh(call) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config["allowedSystemModes"] # Not all systems support "AutoWithReset": register this handler only if required if [m["systemMode"] for m in modes if m["systemMode"] == "AutoWithReset"]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m["systemMode"] != "AutoWithReset"] # Permanent-only modes will use this schema perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m["canBeTemporary"]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if 
m["timingMode"] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Any(*system_mode_schemas), ) # The zone modes are consistent across all systems and use the same schema hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, ) class EvoBroker: """Container for evohome client and data.""" def __init__(self, hass, client, client_v1, store, params) -> None: """Initialize the evohome client and its data structure.""" self.hass = hass self.client = client self.client_v1 = client_v1 self._store = store self.params = params loc_idx = params[CONF_LOCATION_IDX] self.config = client.installation_info[loc_idx][GWS][0][TCS][0] self.tcs = client.locations[loc_idx]._gateways[0]._control_systems[0] self.tcs_utc_offset = timedelta( minutes=client.locations[loc_idx].timeZone[UTC_OFFSET] ) self.temps = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" # evohomeasync2 uses naive/local datetimes access_token_expires = _dt_local_to_aware(self.client.access_token_expires) app_storage = {CONF_USERNAME: self.client.username} app_storage[REFRESH_TOKEN] = self.client.refresh_token app_storage[ACCESS_TOKEN] = self.client.access_token app_storage[ACCESS_TOKEN_EXPIRES] = access_token_expires.isoformat() if self.client_v1 and self.client_v1.user_data: app_storage[USER_DATA] = { "userInfo": {"userID": self.client_v1.user_data["userInfo"]["userID"]}, "sessionId": self.client_v1.user_data["sessionId"], } else: app_storage[USER_DATA] = None await self._store.async_save(app_storage) async def call_client_api(self, api_function, refresh=True) -> Any: """Call a client API.""" try: result = await api_function except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: if not _handle_exception(err): return if refresh: self.hass.helpers.event.async_call_later(1, self.async_update()) return result async def _update_v1(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" def get_session_id(client_v1) -> Optional[str]: user_data = client_v1.user_data if client_v1 else None return user_data.get("sessionId") if user_data else None session_id = get_session_id(self.client_v1) try: temps = list(await self.client_v1.temperatures(force_refresh=True)) except aiohttp.ClientError as err: _LOGGER.warning( "Unable to obtain the latest high-precision temperatures. " "Check your network and the vendor's service status page. " "Proceeding with low-precision temperatures. 
" "Message is: %s", err, ) self.temps = None # these are now stale, will fall back to v2 temps else: if ( str(self.client_v1.location_id) != self.client.locations[self.params[CONF_LOCATION_IDX]].locationId ): _LOGGER.warning( "The v2 API's configured location doesn't match " "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled" ) self.client_v1 = self.temps = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} _LOGGER.debug("Temperatures = %s", self.temps) if session_id != get_session_id(self.client_v1): await self.save_auth_tokens() async def _update_v2(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" access_token = self.client.access_token loc_idx = self.params[CONF_LOCATION_IDX] try: status = await self.client.locations[loc_idx].status() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) else: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status) if access_token != self.client.access_token: await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. This includes state data for a Controller and all its child devices, such as the operating mode of the Controller and the current temp of its children (e.g. Zones, DHW controller). """ await self._update_v2() if self.client_v1: await self._update_v1() # inform the evohome devices that state data has been updated async_dispatcher_send(self.hass, DOMAIN) class EvoDevice(Entity): """Base for any evohome device. This includes the Controller, (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device self._evo_broker = evo_broker self._evo_tcs = evo_broker.tcs self._unique_id = self._name = self._icon = self._precision = None self._supported_features = None self._device_state_attrs = {} async def async_refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] in [SVC_SET_ZONE_OVERRIDE, SVC_RESET_ZONE_OVERRIDE]: await self.async_zone_svc_request(payload["service"], payload["data"]) return await self.async_tcs_svc_request(payload["service"], payload["data"]) async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller.""" raise NotImplementedError async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" raise NotImplementedError @property def should_poll(self) -> bool: """Evohome entities should not be polled.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the evohome entity.""" return self._name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the evohome-specific state attributes.""" status = self._device_state_attrs if "systemModeStatus" in status: convert_until(status["systemModeStatus"], "timeUntil") if "setpointStatus" in status: convert_until(status["setpointStatus"], "until") if "stateStatus" in status: convert_until(status["stateStatus"], "until") return {"status": 
convert_dict(status)} @property def icon(self) -> str: """Return the icon to use in the frontend UI.""" return self._icon @property def supported_features(self) -> int: """Get the flag of supported features of the device.""" return self._supported_features async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" async_dispatcher_connect(self.hass, DOMAIN, self.async_refresh) @property def precision(self) -> float: """Return the temperature precision to use in the frontend UI.""" return self._precision @property def temperature_unit(self) -> str: """Return the temperature unit to use in the frontend UI.""" return TEMP_CELSIUS class EvoChild(EvoDevice): """Base for any evohome child. This includes (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a evohome Controller (hub).""" super().__init__(evo_broker, evo_device) self._schedule = {} self._setpoints = {} @property def current_temperature(self) -> Optional[float]: """Return the current temperature of a Zone.""" if self._evo_broker.temps: if self._evo_broker.temps[self._evo_device.zoneId] != 128: return self._evo_broker.temps[self._evo_device.zoneId] if self._evo_device.temperatureStatus["isAvailable"]: return self._evo_device.temperatureStatus["temperature"] @property def setpoints(self) -> Dict[str, Any]: """Return the current/next setpoints from the schedule. Only Zones & DHW controllers (but not the TCS) can have schedules. """ def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt: dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset return dt_util.as_local(dt_aware) if not self._schedule["DailySchedules"]: return {} # no schedule {'DailySchedules': []}, so no scheduled setpoints day_time = dt_util.now() day_of_week = int(day_time.strftime("%w")) # 0 is Sunday time_of_day = day_time.strftime("%H:%M:%S") try: # Iterate today's switchpoints until past the current time of day... day = self._schedule["DailySchedules"][day_of_week] sp_idx = -1 # last switchpoint of the day before for i, tmp in enumerate(day["Switchpoints"]): if time_of_day > tmp["TimeOfDay"]: sp_idx = i # current setpoint else: break # Did the current SP start yesterday? Does the next start SP tomorrow? 
this_sp_day = -1 if sp_idx == -1 else 0 next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0 for key, offset, idx in [ ("this", this_sp_day, sp_idx), ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)), ]: sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d") day = self._schedule["DailySchedules"][(day_of_week + offset) % 7] switchpoint = day["Switchpoints"][idx] dt_aware = _dt_evo_to_aware( dt_util.parse_datetime(f"{sp_date}T{switchpoint['TimeOfDay']}"), self._evo_broker.tcs_utc_offset, ) self._setpoints[f"{key}_sp_from"] = dt_aware.isoformat() try: self._setpoints[f"{key}_sp_temp"] = switchpoint["heatSetpoint"] except KeyError: self._setpoints[f"{key}_sp_state"] = switchpoint["DhwState"] except IndexError: self._setpoints = {} _LOGGER.warning( "Failed to get setpoints, report as an issue if this error persists", exc_info=True, ) return self._setpoints async def _update_schedule(self) -> None: """Get the latest schedule, if any.""" if "DailySchedules" in self._schedule and not self._schedule["DailySchedules"]: if not self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: return # avoid unnecessary I/O - there's nothing to update self._schedule = await self._evo_broker.call_client_api( self._evo_device.schedule(), refresh=False ) _LOGGER.debug("Schedule['%s'] = %s", self.name, self._schedule) async def async_update(self) -> None: """Get the latest state data.""" next_sp_from = self._setpoints.get("next_sp_from", "2000-01-01T00:00:00+00:00") if dt_util.now() >= dt_util.parse_datetime(next_sp_from): await self._update_schedule() # no schedule, or it's out-of-date self._device_state_attrs = {"setpoints": self.setpoints}
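convert_dict in the evohome module above recursively rewrites the vendor's camelCase JSON keys as snake_case before they are exposed as state attributes. Here is a standalone copy of that key-conversion logic applied to a made-up status fragment; the sample data is illustrative, not real TCC output.

import re

def to_snake(key):
    """Same regex-based conversion as convert_key inside convert_dict above."""
    string = re.sub(r"[\-\.\s]", "_", str(key))
    return string[0].lower() + re.sub(
        r"[A-Z]", lambda matched: f"_{matched.group(0).lower()}", string[1:]
    )

def convert(dictionary):
    """Recursively convert a dict's keys to snake_case, leaving values untouched."""
    return {
        (to_snake(k) if isinstance(k, str) else k): (
            convert(v) if isinstance(v, dict) else v
        )
        for k, v in dictionary.items()
    }

sample = {"systemModeStatus": {"mode": "AutoWithEco", "isPermanent": True}}
print(convert(sample))
# {'system_mode_status': {'mode': 'AutoWithEco', 'is_permanent': True}}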
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/evohome/__init__.py
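setup_service_functions in the evohome module above registers different service schemas depending on which operating modes the controller's allowedSystemModes list reports: permanent-only modes, duration-based (hours) modes, and period-based (days) modes, with reset_system added only when AutoWithReset is present. The small sketch below shows that partitioning; the sample mode list is an assumption for illustration, not real vendor data.

# Sketch of the allowedSystemModes partitioning done by setup_service_functions.
# The mode entries below are invented examples, not real TCC data.
modes = [
    {"systemMode": "Auto", "canBeTemporary": False},
    {"systemMode": "AutoWithEco", "canBeTemporary": True, "timingMode": "Duration"},
    {"systemMode": "Away", "canBeTemporary": True, "timingMode": "Period"},
    {"systemMode": "DayOff", "canBeTemporary": True, "timingMode": "Period"},
    {"systemMode": "HeatingOff", "canBeTemporary": False},
    {"systemMode": "AutoWithReset", "canBeTemporary": False},
]

has_reset = any(m["systemMode"] == "AutoWithReset" for m in modes)
modes = [m for m in modes if m["systemMode"] != "AutoWithReset"]

perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]]
temp = [m for m in modes if m["canBeTemporary"]]
hour_modes = [m["systemMode"] for m in temp if m["timingMode"] == "Duration"]
day_modes = [m["systemMode"] for m in temp if m["timingMode"] == "Period"]

print(has_reset)   # True  -> reset_system would be registered
print(perm_modes)  # ['Auto', 'HeatingOff']  -> permanent-only schema
print(hour_modes)  # ['AutoWithEco']         -> 0-24 hour duration schema
print(day_modes)   # ['Away', 'DayOff']      -> 1-99 day period schema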
"""Support for particulate matter sensors connected to a serial port.""" import logging from pmsensor import serial_pm as pm import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_BRAND = "brand" CONF_SERIAL_DEVICE = "serial_device" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRAND): cv.string, vol.Required(CONF_SERIAL_DEVICE): cv.string, vol.Optional(CONF_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available PM sensors.""" try: coll = pm.PMDataCollector( config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)] ) except KeyError: _LOGGER.error( "Brand %s not supported\n supported brands: %s", config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys(), ) return except OSError as err: _LOGGER.error( "Could not open serial connection to %s (%s)", config.get(CONF_SERIAL_DEVICE), err, ) return dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = "{} PM{}".format(config.get(CONF_NAME), pmname) else: name = f"PM{pmname}" dev.append(ParticulateMatterSensor(coll, name, pmname)) add_entities(dev) class ParticulateMatterSensor(Entity): """Representation of an Particulate matter sensor.""" def __init__(self, pmDataCollector, name, pmname): """Initialize a new PM sensor.""" self._name = name self._pmname = pmname self._state = None self._collector = pmDataCollector @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER def update(self): """Read from sensor and update the state.""" _LOGGER.debug("Reading data from PM sensor") try: self._state = self._collector.read_data()[self._pmname] except KeyError: _LOGGER.error("Could not read PM%s value", self._pmname)
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/serial_pm/sensor.py
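The PLATFORM_SCHEMA in the serial_pm sensor above requires both brand and serial_device and accepts an optional name. Here is a simplified standalone sketch of that validation using plain voluptuous, with str in place of cv.string and invented sample values.

import voluptuous as vol

# Simplified stand-in for the serial_pm PLATFORM_SCHEMA above: only the three
# platform keys, with plain str in place of cv.string.
schema = vol.Schema(
    {
        vol.Required("brand"): str,
        vol.Required("serial_device"): str,
        vol.Optional("name"): str,
    }
)

print(schema({"brand": "novafitness_sds011", "serial_device": "/dev/ttyUSB0"}))
try:
    schema({"serial_device": "/dev/ttyUSB0"})  # "brand" missing
except vol.MultipleInvalid as err:
    print(err)  # required key not provided @ data['brand']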
"""Support for Luftdaten sensors.""" import logging from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from . import ( DATA_LUFTDATEN, DATA_LUFTDATEN_CLIENT, DEFAULT_ATTRIBUTION, DOMAIN, SENSORS, TOPIC_UPDATE, ) from .const import ATTR_SENSOR_ID _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up a Luftdaten sensor based on a config entry.""" luftdaten = hass.data[DOMAIN][DATA_LUFTDATEN_CLIENT][entry.entry_id] sensors = [] for sensor_type in luftdaten.sensor_conditions: try: name, icon, unit = SENSORS[sensor_type] except KeyError: _LOGGER.debug("Unknown sensor value type: %s", sensor_type) continue sensors.append( LuftdatenSensor( luftdaten, sensor_type, name, icon, unit, entry.data[CONF_SHOW_ON_MAP] ) ) async_add_entities(sensors, True) class LuftdatenSensor(Entity): """Implementation of a Luftdaten sensor.""" def __init__(self, luftdaten, sensor_type, name, icon, unit, show): """Initialize the Luftdaten sensor.""" self._async_unsub_dispatcher_connect = None self.luftdaten = luftdaten self._icon = icon self._name = name self._data = None self.sensor_type = sensor_type self._unit_of_measurement = unit self._show_on_map = show self._attrs = {} @property def icon(self): """Return the icon.""" return self._icon @property def state(self): """Return the state of the device.""" if self._data is not None: return self._data[self.sensor_type] @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def should_poll(self): """Disable polling.""" return False @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" if self._data is not None: return f"{self._data['sensor_id']}_{self.sensor_type}" @property def device_state_attributes(self): """Return the state attributes.""" self._attrs[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION if self._data is not None: self._attrs[ATTR_SENSOR_ID] = self._data["sensor_id"] on_map = ATTR_LATITUDE, ATTR_LONGITUDE no_map = "lat", "long" lat_format, lon_format = on_map if self._show_on_map else no_map try: self._attrs[lon_format] = self._data["longitude"] self._attrs[lat_format] = self._data["latitude"] return self._attrs except KeyError: return async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update ) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Get the latest data and update the state.""" try: self._data = self.luftdaten.data[DATA_LUFTDATEN] except KeyError: return
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/luftdaten/sensor.py
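The luftdaten sensor module above disables polling and instead registers a callback with Home Assistant's dispatcher (async_dispatcher_connect on TOPIC_UPDATE), keeping the unsubscribe handle so it can detach in async_will_remove_from_hass. Below is a minimal standalone sketch of that subscribe/unsubscribe pattern; the Dispatcher class and names are illustrative stand-ins, not Home Assistant's actual helper.

from typing import Callable, Dict, List


class Dispatcher:
    """Toy signal dispatcher: register callbacks per topic, fire them on send."""

    def __init__(self) -> None:
        self._listeners: Dict[str, List[Callable[[], None]]] = {}

    def connect(self, topic: str, callback: Callable[[], None]) -> Callable[[], None]:
        """Register a callback and return a function that disconnects it."""
        self._listeners.setdefault(topic, []).append(callback)

        def disconnect() -> None:
            self._listeners[topic].remove(callback)

        return disconnect

    def send(self, topic: str) -> None:
        """Invoke every callback currently registered for the topic."""
        for callback in list(self._listeners.get(topic, [])):
            callback()


dispatcher = Dispatcher()
# Connect on add and keep the unsubscribe handle, as async_added_to_hass does.
unsub = dispatcher.connect("luftdaten_update", lambda: print("refresh state"))
dispatcher.send("luftdaten_update")   # prints "refresh state"
unsub()                               # detach, as async_will_remove_from_hass does
dispatcher.send("luftdaten_update")   # no listeners left, nothing happens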
"""Support for LIRC devices.""" # pylint: disable=no-member, import-error import logging import threading import time import lirc import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP _LOGGER = logging.getLogger(__name__) BUTTON_NAME = "button_name" DOMAIN = "lirc" EVENT_IR_COMMAND_RECEIVED = "ir_command_received" ICON = "mdi:remote" CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the LIRC capability.""" # blocking=True gives unexpected behavior (multiple responses for 1 press) # also by not blocking, we allow hass to shut down the thread gracefully # on exit. lirc.init("home-assistant", blocking=False) lirc_interface = LircInterface(hass) def _start_lirc(_event): lirc_interface.start() def _stop_lirc(_event): lirc_interface.stopped.set() hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_lirc) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_lirc) return True class LircInterface(threading.Thread): """ This interfaces with the lirc daemon to read IR commands. When using lirc in blocking mode, sometimes repeated commands get produced in the next read of a command so we use a thread here to just wait around until a non-empty response is obtained from lirc. """ def __init__(self, hass): """Construct a LIRC interface object.""" threading.Thread.__init__(self) self.daemon = True self.stopped = threading.Event() self.hass = hass def run(self): """Run the loop of the LIRC interface thread.""" _LOGGER.debug("LIRC interface thread started") while not self.stopped.isSet(): try: code = lirc.nextcode() # list; empty if no buttons pressed except lirc.NextCodeError: _LOGGER.warning("Error reading next code from LIRC") code = None # interpret result from python-lirc if code: code = code[0] _LOGGER.info("Got new LIRC code %s", code) self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code}) else: time.sleep(0.2) lirc.deinit() _LOGGER.debug("LIRC interface thread stopped")
"""The tests for the Demo vacuum platform.""" import pytest from homeassistant.components import vacuum from homeassistant.components.demo.vacuum import ( DEMO_VACUUM_BASIC, DEMO_VACUUM_COMPLETE, DEMO_VACUUM_MINIMAL, DEMO_VACUUM_MOST, DEMO_VACUUM_NONE, DEMO_VACUUM_STATE, FAN_SPEEDS, ) from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_COMMAND, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, ATTR_STATUS, DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, STATE_DOCKED, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, ) from homeassistant.setup import async_setup_component from tests.common import async_mock_service from tests.components.vacuum import common ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower() @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass): """Initialize setup demo vacuum.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() async def test_supported_features(hass): """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195 assert state.attributes.get(ATTR_STATUS) == "Charging" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_STATUS) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.state == STATE_OFF state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436 assert state.state == STATE_DOCKED assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert 
state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS async def test_methods(hass): """Test if methods call the services as expected.""" hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC) hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC) await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_stop(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.attributes.get(ATTR_STATUS) != "Charging" await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "I'm over here" in state.attributes.get(ATTR_STATUS) await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "Returning home" in state.attributes.get(ATTR_STATUS) await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE ) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) assert "spot" in state.attributes.get(ATTR_STATUS) assert state.state == STATE_ON await common.async_start(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING await common.async_pause(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_PAUSED await common.async_stop(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_IDLE state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100 assert state.state != STATE_DOCKED await common.async_return_to_base(hass, ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE ) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state == STATE_CLEANING async def test_unsupported_methods(hass): """Test service calls for unsupported vacuums.""" hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_turn_off(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_stop(hass, ENTITY_VACUUM_NONE) assert vacuum.is_on(hass, ENTITY_VACUUM_NONE) hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await 
common.async_turn_on(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_toggle(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) # Non supported methods: await common.async_start_pause(hass, ENTITY_VACUUM_NONE) assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE) await common.async_locate(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_return_to_base(hass, ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_STATUS) is None await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE) state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1] await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC) state = hass.states.get(ENTITY_VACUUM_BASIC) assert "spot" not in state.attributes.get(ATTR_STATUS) assert state.state == STATE_OFF # VacuumEntity should not support start and pause methods. hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON) await hass.async_block_till_done() assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF) await hass.async_block_till_done() assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) await common.async_start(hass, ENTITY_VACUUM_COMPLETE) assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE) # StateVacuumEntity does not support on/off await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_RETURNING await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE) state = hass.states.get(ENTITY_VACUUM_STATE) assert state.state != STATE_CLEANING async def test_services(hass): """Test vacuum services.""" # Test send_command send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params ) assert len(send_command_calls) == 1 call = send_command_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed( hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE ) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_set_fan_speed(hass): """Test vacuum service to set the fan speed.""" group_vacuums = ",".join( [ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE] ) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) old_state_state = hass.states.get(ENTITY_VACUUM_STATE) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums) new_state_basic = 
hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) new_state_state = hass.states.get(ENTITY_VACUUM_STATE) assert old_state_basic == new_state_basic assert ATTR_FAN_SPEED not in new_state_basic.attributes assert old_state_complete != new_state_complete assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] assert old_state_state != new_state_state assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1] assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0] async def test_send_command(hass): """Test vacuum service to send a command.""" group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE]) old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) await common.async_send_command( hass, "test_command", params={"p1": 3}, entity_id=group_vacuums ) new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC) new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_basic == new_state_basic assert old_state_complete != new_state_complete assert new_state_complete.state == STATE_ON assert ( new_state_complete.attributes[ATTR_STATUS] == "Executing test_command({'p1': 3})" )
pschmitt/home-assistant
tests/components/demo/test_vacuum.py
homeassistant/components/lirc/__init__.py
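The lirc integration above reads IR codes on a daemon thread and uses a threading.Event so the loop can exit cleanly when Home Assistant stops. A minimal self-contained sketch of that stoppable-worker pattern follows; produce() is a hypothetical stand-in for lirc.nextcode() and nothing here touches the real lirc bindings.

import threading
import time


class StoppableWorker(threading.Thread):
    """Background loop that exits promptly once its stop event is set."""

    def __init__(self, produce):
        super().__init__(daemon=True)
        self.stopped = threading.Event()
        self._produce = produce  # stand-in for lirc.nextcode()

    def run(self):
        while not self.stopped.is_set():
            item = self._produce()
            if item:
                print("got item:", item)
            else:
                # Nothing available; sleep briefly instead of busy-waiting.
                time.sleep(0.2)


worker = StoppableWorker(produce=lambda: None)
worker.start()          # comparable to the EVENT_HOMEASSISTANT_START hook above
time.sleep(0.5)
worker.stopped.set()    # comparable to the EVENT_HOMEASSISTANT_STOP hook above
worker.join(timeout=1)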
# ######################### LICENSE ############################ # # Copyright (c) 2005-2015, Michele Simionato # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # Redistributions in bytecode form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. """ Decorator module, see https://pypi.python.org/pypi/decorator for the documentation. """ import re import sys import inspect import operator import itertools import collections from inspect import getfullargspec __version__ = '4.0.5' def get_init(cls): return cls.__init__ # getargspec has been deprecated in Python 3.5 ArgSpec = collections.namedtuple( 'ArgSpec', 'args varargs varkw defaults') def getargspec(f): """A replacement for inspect.getargspec""" spec = getfullargspec(f) return ArgSpec(spec.args, spec.varargs, spec.varkw, spec.defaults) DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(') # basic functionality class FunctionMaker: """ An object with the ability to create functions with a given signature. It has attributes name, doc, module, signature, defaults, dict, and methods update and make. 
""" # Atomic get-and-increment provided by the GIL _compile_count = itertools.count() def __init__(self, func=None, name=None, signature=None, defaults=None, doc=None, module=None, funcdict=None): self.shortsignature = signature if func: # func can be a class or a callable, but not an instance method self.name = func.__name__ if self.name == '<lambda>': # small hack for lambda functions self.name = '_lambda_' self.doc = func.__doc__ self.module = func.__module__ if inspect.isfunction(func): argspec = getfullargspec(func) self.annotations = getattr(func, '__annotations__', {}) for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults'): setattr(self, a, getattr(argspec, a)) for i, arg in enumerate(self.args): setattr(self, 'arg%d' % i, arg) allargs = list(self.args) allshortargs = list(self.args) if self.varargs: allargs.append('*' + self.varargs) allshortargs.append('*' + self.varargs) elif self.kwonlyargs: allargs.append('*') # single star syntax for a in self.kwonlyargs: allargs.append('%s=None' % a) allshortargs.append('%s=%s' % (a, a)) if self.varkw: allargs.append('**' + self.varkw) allshortargs.append('**' + self.varkw) self.signature = ', '.join(allargs) self.shortsignature = ', '.join(allshortargs) self.dict = func.__dict__.copy() # func=None happens when decorating a caller if name: self.name = name if signature is not None: self.signature = signature if defaults: self.defaults = defaults if doc: self.doc = doc if module: self.module = module if funcdict: self.dict = funcdict # check existence required attributes assert hasattr(self, 'name') if not hasattr(self, 'signature'): raise TypeError('You are decorating a non-function: %s' % func) def update(self, func, **kw): "Update the signature of func with the data in self" func.__name__ = self.name func.__doc__ = getattr(self, 'doc', None) func.__dict__ = getattr(self, 'dict', {}) func.__defaults__ = getattr(self, 'defaults', ()) func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None) func.__annotations__ = getattr(self, 'annotations', None) try: frame = sys._getframe(3) except AttributeError: # for IronPython and similar implementations callermodule = '?' else: callermodule = frame.f_globals.get('__name__', '?') func.__module__ = getattr(self, 'module', callermodule) func.__dict__.update(kw) def make(self, src_templ, evaldict=None, addsource=False, **attrs): "Make a new function from a given template and update the signature" src = src_templ % vars(self) # expand name and signature evaldict = evaldict or {} mo = DEF.match(src) if mo is None: raise SyntaxError('not a valid function template\n%s' % src) name = mo.group(1) # extract the function name names = set([name] + [arg.strip(' *') for arg in self.shortsignature.split(',')]) for n in names: if n in ('_func_', '_call_'): raise NameError('%s is overridden in\n%s' % (n, src)) if not src.endswith('\n'): # add a newline just for safety src += '\n' # this is needed in old versions of Python # Ensure each generated function has a unique filename for profilers # (such as cProfile) that depend on the tuple of (<filename>, # <definition line>, <function name>) being unique. 
filename = '<decorator-gen-%d>' % (next(self._compile_count),) try: code = compile(src, filename, 'single') exec(code, evaldict) except: # noqa: E722 print('Error in generated code:', file=sys.stderr) print(src, file=sys.stderr) raise func = evaldict[name] if addsource: attrs['__source__'] = src self.update(func, **attrs) return func @classmethod def create(cls, obj, body, evaldict, defaults=None, doc=None, module=None, addsource=True, **attrs): """ Create a function from the strings name, signature, and body. evaldict is the evaluation dictionary. If addsource is true, an attribute __source__ is added to the result. The attributes attrs are added, if any. """ if isinstance(obj, str): # "name(signature)" name, rest = obj.strip().split('(', 1) signature = rest[:-1] # strip a right parens func = None else: # a function name = None signature = None func = obj self = cls(func, name, signature, defaults, doc, module) ibody = '\n'.join(' ' + line for line in body.splitlines()) return self.make('def %(name)s(%(signature)s):\n' + ibody, evaldict, addsource, **attrs) def decorate(func, caller): """ decorate(func, caller) decorates a function using a caller. """ evaldict = func.__globals__.copy() evaldict['_call_'] = caller evaldict['_func_'] = func fun = FunctionMaker.create( func, "return _call_(_func_, %(shortsignature)s)", evaldict, __wrapped__=func) if hasattr(func, '__qualname__'): fun.__qualname__ = func.__qualname__ return fun def decorator(caller, _func=None): """decorator(caller) converts a caller function into a decorator""" if _func is not None: # return a decorated function # this is obsolete behavior; you should use decorate instead return decorate(_func, caller) # else return a decorator function if inspect.isclass(caller): name = caller.__name__.lower() callerfunc = get_init(caller) doc = 'decorator(%s) converts functions/generators into ' \ 'factories of %s objects' % (caller.__name__, caller.__name__) elif inspect.isfunction(caller): if caller.__name__ == '<lambda>': name = '_lambda_' else: name = caller.__name__ callerfunc = caller doc = caller.__doc__ else: # assume caller is an object with a __call__ method name = caller.__class__.__name__.lower() callerfunc = caller.__call__.__func__ doc = caller.__call__.__doc__ evaldict = callerfunc.__globals__.copy() evaldict['_call_'] = caller evaldict['_decorate_'] = decorate return FunctionMaker.create( '%s(func)' % name, 'return _decorate_(func, _call_)', evaldict, doc=doc, module=caller.__module__, __wrapped__=caller) # ####################### contextmanager ####################### # try: # Python >= 3.2 from contextlib import _GeneratorContextManager except ImportError: # Python >= 2.5 from contextlib import GeneratorContextManager as _GeneratorContextManager class ContextManager(_GeneratorContextManager): def __call__(self, func): """Context manager decorator""" return FunctionMaker.create( func, "with _self_: return _func_(%(shortsignature)s)", dict(_self_=self, _func_=func), __wrapped__=func) init = getfullargspec(_GeneratorContextManager.__init__) n_args = len(init.args) if n_args == 2 and not init.varargs: # (self, genobj) Python 2.7 def __init__(self, g, *a, **k): return _GeneratorContextManager.__init__(self, g(*a, **k)) ContextManager.__init__ = __init__ elif n_args == 2 and init.varargs: # (self, gen, *a, **k) Python 3.4 pass elif n_args == 4: # (self, gen, args, kwds) Python 3.5 def __init__(self, g, *a, **k): return _GeneratorContextManager.__init__(self, g, a, k) ContextManager.__init__ = __init__ contextmanager = 
decorator(ContextManager) # ############################ dispatch_on ############################ # def append(a, vancestors): """ Append ``a`` to the list of the virtual ancestors, unless it is already included. """ add = True for j, va in enumerate(vancestors): if issubclass(va, a): add = False break if issubclass(a, va): vancestors[j] = a add = False if add: vancestors.append(a) # inspired from simplegeneric by P.J. Eby and functools.singledispatch def dispatch_on(*dispatch_args): """ Factory of decorators turning a function into a generic function dispatching on the given arguments. """ assert dispatch_args, 'No dispatch args passed' dispatch_str = '(%s,)' % ', '.join(dispatch_args) def check(arguments, wrong=operator.ne, msg=''): """Make sure one passes the expected number of arguments""" if wrong(len(arguments), len(dispatch_args)): raise TypeError('Expected %d arguments, got %d%s' % (len(dispatch_args), len(arguments), msg)) def gen_func_dec(func): """Decorator turning a function into a generic function""" # first check the dispatch arguments argset = set(getfullargspec(func).args) if not set(dispatch_args) <= argset: raise NameError('Unknown dispatch arguments %s' % dispatch_str) typemap = {} def vancestors(*types): """ Get a list of sets of virtual ancestors for the given types """ check(types) ras = [[] for _ in range(len(dispatch_args))] for types_ in typemap: for t, type_, ra in zip(types, types_, ras): if issubclass(t, type_) and type_ not in t.__mro__: append(type_, ra) return [set(ra) for ra in ras] def ancestors(*types): """ Get a list of virtual MROs, one for each type """ check(types) lists = [] for t, vas in zip(types, vancestors(*types)): n_vas = len(vas) if n_vas > 1: raise RuntimeError( 'Ambiguous dispatch for %s: %s' % (t, vas)) elif n_vas == 1: va, = vas mro = type('t', (t, va), {}).__mro__[1:] else: mro = t.__mro__ lists.append(mro[:-1]) # discard t and object return lists def register(*types): """ Decorator to register an implementation for the given types """ check(types) def dec(f): check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__) typemap[types] = f return f return dec def dispatch_info(*types): """ An utility to introspect the dispatch algorithm """ check(types) lst = [tuple(a.__name__ for a in anc) for anc in itertools.product(*ancestors(*types))] return lst def _dispatch(dispatch_args, *args, **kw): types = tuple(type(arg) for arg in dispatch_args) try: # fast path f = typemap[types] except KeyError: pass else: return f(*args, **kw) combinations = itertools.product(*ancestors(*types)) next(combinations) # the first one has been already tried for types_ in combinations: f = typemap.get(types_) if f is not None: return f(*args, **kw) # else call the default implementation return func(*args, **kw) return FunctionMaker.create( func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str, dict(_f_=_dispatch), register=register, default=func, typemap=typemap, vancestors=vancestors, ancestors=ancestors, dispatch_info=dispatch_info, __wrapped__=func) gen_func_dec.__name__ = 'dispatch_on' + dispatch_str return gen_func_dec
from __future__ import annotations from typing import List, Tuple, Callable, Optional import pytest from itertools import product from numpy.testing import assert_allclose, suppress_warnings from scipy import special from scipy.special import cython_special bint_points = [True, False] int_points = [-10, -1, 1, 10] real_points = [-10.0, -1.0, 1.0, 10.0] complex_points = [complex(*tup) for tup in product(real_points, repeat=2)] CYTHON_SIGNATURE_MAP = { 'b': 'bint', 'f': 'float', 'd': 'double', 'g': 'long double', 'F': 'float complex', 'D': 'double complex', 'G': 'long double complex', 'i': 'int', 'l': 'long' } TEST_POINTS = { 'b': bint_points, 'f': real_points, 'd': real_points, 'g': real_points, 'F': complex_points, 'D': complex_points, 'G': complex_points, 'i': int_points, 'l': int_points, } PARAMS: List[Tuple[Callable, Callable, Tuple[str, ...], Optional[str]]] = [ (special.agm, cython_special.agm, ('dd',), None), (special.airy, cython_special._airy_pywrap, ('d', 'D'), None), (special.airye, cython_special._airye_pywrap, ('d', 'D'), None), (special.bdtr, cython_special.bdtr, ('dld', 'ddd'), None), (special.bdtrc, cython_special.bdtrc, ('dld', 'ddd'), None), (special.bdtri, cython_special.bdtri, ('dld', 'ddd'), None), (special.bdtrik, cython_special.bdtrik, ('ddd',), None), (special.bdtrin, cython_special.bdtrin, ('ddd',), None), (special.bei, cython_special.bei, ('d',), None), (special.beip, cython_special.beip, ('d',), None), (special.ber, cython_special.ber, ('d',), None), (special.berp, cython_special.berp, ('d',), None), (special.besselpoly, cython_special.besselpoly, ('ddd',), None), (special.beta, cython_special.beta, ('dd',), None), (special.betainc, cython_special.betainc, ('ddd',), None), (special.betaincinv, cython_special.betaincinv, ('ddd',), None), (special.betaln, cython_special.betaln, ('dd',), None), (special.binom, cython_special.binom, ('dd',), None), (special.boxcox, cython_special.boxcox, ('dd',), None), (special.boxcox1p, cython_special.boxcox1p, ('dd',), None), (special.btdtr, cython_special.btdtr, ('ddd',), None), (special.btdtri, cython_special.btdtri, ('ddd',), None), (special.btdtria, cython_special.btdtria, ('ddd',), None), (special.btdtrib, cython_special.btdtrib, ('ddd',), None), (special.cbrt, cython_special.cbrt, ('d',), None), (special.chdtr, cython_special.chdtr, ('dd',), None), (special.chdtrc, cython_special.chdtrc, ('dd',), None), (special.chdtri, cython_special.chdtri, ('dd',), None), (special.chdtriv, cython_special.chdtriv, ('dd',), None), (special.chndtr, cython_special.chndtr, ('ddd',), None), (special.chndtridf, cython_special.chndtridf, ('ddd',), None), (special.chndtrinc, cython_special.chndtrinc, ('ddd',), None), (special.chndtrix, cython_special.chndtrix, ('ddd',), None), (special.cosdg, cython_special.cosdg, ('d',), None), (special.cosm1, cython_special.cosm1, ('d',), None), (special.cotdg, cython_special.cotdg, ('d',), None), (special.dawsn, cython_special.dawsn, ('d', 'D'), None), (special.ellipe, cython_special.ellipe, ('d',), None), (special.ellipeinc, cython_special.ellipeinc, ('dd',), None), (special.ellipj, cython_special._ellipj_pywrap, ('dd',), None), (special.ellipkinc, cython_special.ellipkinc, ('dd',), None), (special.ellipkm1, cython_special.ellipkm1, ('d',), None), (special.ellipk, cython_special.ellipk, ('d',), None), (special.entr, cython_special.entr, ('d',), None), (special.erf, cython_special.erf, ('d', 'D'), None), (special.erfc, cython_special.erfc, ('d', 'D'), None), (special.erfcx, cython_special.erfcx, ('d', 'D'), 
None), (special.erfi, cython_special.erfi, ('d', 'D'), None), (special.erfinv, cython_special.erfinv, ('d',), None), (special.erfcinv, cython_special.erfcinv, ('d',), None), (special.eval_chebyc, cython_special.eval_chebyc, ('dd', 'dD', 'ld'), None), (special.eval_chebys, cython_special.eval_chebys, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyt, cython_special.eval_chebyt, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyu, cython_special.eval_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_gegenbauer, cython_special.eval_gegenbauer, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_genlaguerre, cython_special.eval_genlaguerre, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_hermite, cython_special.eval_hermite, ('ld',), None), (special.eval_hermitenorm, cython_special.eval_hermitenorm, ('ld',), None), (special.eval_jacobi, cython_special.eval_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_laguerre, cython_special.eval_laguerre, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_legendre, cython_special.eval_legendre, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyt, cython_special.eval_sh_chebyt, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyu, cython_special.eval_sh_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_sh_jacobi, cython_special.eval_sh_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_sh_legendre, cython_special.eval_sh_legendre, ('dd', 'dD', 'ld'), None), (special.exp1, cython_special.exp1, ('d', 'D'), None), (special.exp10, cython_special.exp10, ('d',), None), (special.exp2, cython_special.exp2, ('d',), None), (special.expi, cython_special.expi, ('d', 'D'), None), (special.expit, cython_special.expit, ('f', 'd', 'g'), None), (special.expm1, cython_special.expm1, ('d', 'D'), None), (special.expn, cython_special.expn, ('ld', 'dd'), None), (special.exprel, cython_special.exprel, ('d',), None), (special.fdtr, cython_special.fdtr, ('ddd',), None), (special.fdtrc, cython_special.fdtrc, ('ddd',), None), (special.fdtri, cython_special.fdtri, ('ddd',), None), (special.fdtridfd, cython_special.fdtridfd, ('ddd',), None), (special.fresnel, cython_special._fresnel_pywrap, ('d', 'D'), None), (special.gamma, cython_special.gamma, ('d', 'D'), None), (special.gammainc, cython_special.gammainc, ('dd',), None), (special.gammaincc, cython_special.gammaincc, ('dd',), None), (special.gammainccinv, cython_special.gammainccinv, ('dd',), None), (special.gammaincinv, cython_special.gammaincinv, ('dd',), None), (special.gammaln, cython_special.gammaln, ('d',), None), (special.gammasgn, cython_special.gammasgn, ('d',), None), (special.gdtr, cython_special.gdtr, ('ddd',), None), (special.gdtrc, cython_special.gdtrc, ('ddd',), None), (special.gdtria, cython_special.gdtria, ('ddd',), None), (special.gdtrib, cython_special.gdtrib, ('ddd',), None), (special.gdtrix, cython_special.gdtrix, ('ddd',), None), (special.hankel1, cython_special.hankel1, ('dD',), None), (special.hankel1e, cython_special.hankel1e, ('dD',), None), (special.hankel2, cython_special.hankel2, ('dD',), None), (special.hankel2e, cython_special.hankel2e, ('dD',), None), (special.huber, cython_special.huber, ('dd',), None), (special.hyp0f1, cython_special.hyp0f1, ('dd', 'dD'), None), (special.hyp1f1, cython_special.hyp1f1, ('ddd', 'ddD'), None), (special.hyp2f1, cython_special.hyp2f1, 
('dddd', 'dddD'), None), (special.hyperu, cython_special.hyperu, ('ddd',), None), (special.i0, cython_special.i0, ('d',), None), (special.i0e, cython_special.i0e, ('d',), None), (special.i1, cython_special.i1, ('d',), None), (special.i1e, cython_special.i1e, ('d',), None), (special.inv_boxcox, cython_special.inv_boxcox, ('dd',), None), (special.inv_boxcox1p, cython_special.inv_boxcox1p, ('dd',), None), (special.it2i0k0, cython_special._it2i0k0_pywrap, ('d',), None), (special.it2j0y0, cython_special._it2j0y0_pywrap, ('d',), None), (special.it2struve0, cython_special.it2struve0, ('d',), None), (special.itairy, cython_special._itairy_pywrap, ('d',), None), (special.iti0k0, cython_special._iti0k0_pywrap, ('d',), None), (special.itj0y0, cython_special._itj0y0_pywrap, ('d',), None), (special.itmodstruve0, cython_special.itmodstruve0, ('d',), None), (special.itstruve0, cython_special.itstruve0, ('d',), None), (special.iv, cython_special.iv, ('dd', 'dD'), None), (special.ive, cython_special.ive, ('dd', 'dD'), None), (special.j0, cython_special.j0, ('d',), None), (special.j1, cython_special.j1, ('d',), None), (special.jv, cython_special.jv, ('dd', 'dD'), None), (special.jve, cython_special.jve, ('dd', 'dD'), None), (special.k0, cython_special.k0, ('d',), None), (special.k0e, cython_special.k0e, ('d',), None), (special.k1, cython_special.k1, ('d',), None), (special.k1e, cython_special.k1e, ('d',), None), (special.kei, cython_special.kei, ('d',), None), (special.keip, cython_special.keip, ('d',), None), (special.kelvin, cython_special._kelvin_pywrap, ('d',), None), (special.ker, cython_special.ker, ('d',), None), (special.kerp, cython_special.kerp, ('d',), None), (special.kl_div, cython_special.kl_div, ('dd',), None), (special.kn, cython_special.kn, ('ld', 'dd'), None), (special.kolmogi, cython_special.kolmogi, ('d',), None), (special.kolmogorov, cython_special.kolmogorov, ('d',), None), (special.kv, cython_special.kv, ('dd', 'dD'), None), (special.kve, cython_special.kve, ('dd', 'dD'), None), (special.log1p, cython_special.log1p, ('d', 'D'), None), (special.log_ndtr, cython_special.log_ndtr, ('d', 'D'), None), (special.ndtri_exp, cython_special.ndtri_exp, ('d',), None), (special.loggamma, cython_special.loggamma, ('D',), None), (special.logit, cython_special.logit, ('f', 'd', 'g'), None), (special.lpmv, cython_special.lpmv, ('ddd',), None), (special.mathieu_a, cython_special.mathieu_a, ('dd',), None), (special.mathieu_b, cython_special.mathieu_b, ('dd',), None), (special.mathieu_cem, cython_special._mathieu_cem_pywrap, ('ddd',), None), (special.mathieu_modcem1, cython_special._mathieu_modcem1_pywrap, ('ddd',), None), (special.mathieu_modcem2, cython_special._mathieu_modcem2_pywrap, ('ddd',), None), (special.mathieu_modsem1, cython_special._mathieu_modsem1_pywrap, ('ddd',), None), (special.mathieu_modsem2, cython_special._mathieu_modsem2_pywrap, ('ddd',), None), (special.mathieu_sem, cython_special._mathieu_sem_pywrap, ('ddd',), None), (special.modfresnelm, cython_special._modfresnelm_pywrap, ('d',), None), (special.modfresnelp, cython_special._modfresnelp_pywrap, ('d',), None), (special.modstruve, cython_special.modstruve, ('dd',), None), (special.nbdtr, cython_special.nbdtr, ('lld', 'ddd'), None), (special.nbdtrc, cython_special.nbdtrc, ('lld', 'ddd'), None), (special.nbdtri, cython_special.nbdtri, ('lld', 'ddd'), None), (special.nbdtrik, cython_special.nbdtrik, ('ddd',), None), (special.nbdtrin, cython_special.nbdtrin, ('ddd',), None), (special.ncfdtr, cython_special.ncfdtr, ('dddd',), None), 
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/_lib/decorator.py
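The vendored decorator module above builds wrappers through FunctionMaker so that a decorated function keeps its original name, signature, docstring, and defaults, unlike a plain closure-based wrapper. A short sketch of that behaviour, assuming the standalone decorator package from PyPI (which this scipy/_lib copy mirrors) is installed:

import inspect

from decorator import decorator  # assumed available: the PyPI package mirrored above


@decorator
def trace(func, *args, **kwargs):
    """Caller function: runs around every call of the decorated function."""
    print(f"calling {func.__name__} with {args!r} {kwargs!r}")
    return func(*args, **kwargs)


@trace
def add(x, y=1):
    """Add two numbers."""
    return x + y


print(add(2, y=3))             # trace line, then 5
print(inspect.signature(add))  # (x, y=1) -- signature preserved by FunctionMaker
print(add.__doc__)             # original docstring preserved as well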
import os from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from scipy._build_utils import (get_f2py_int64_options, ilp64_pre_build_hook, uses_blas64) if uses_blas64(): # TODO: Note that fitpack does not use BLAS/LAPACK. # The reason why we use 64-bit ints only in this case # is because scipy._build_utils knows the 64-bit int # flags for too few Fortran compilers, so we cannot turn # this on by default. pre_build_hook = ilp64_pre_build_hook f2py_options = get_f2py_int64_options() define_macros = [("HAVE_ILP64", None)] else: pre_build_hook = None f2py_options = None define_macros = [] config = Configuration('interpolate', parent_package, top_path) fitpack_src = [join('fitpack', '*.f')] config.add_library('fitpack', sources=fitpack_src, _pre_build_hook=pre_build_hook) config.add_extension('interpnd', sources=['interpnd.c']) config.add_extension('_ppoly', sources=['_ppoly.c']) config.add_extension('_bspl', sources=['_bspl.c'], depends=['src/__fitpack.h']) config.add_extension('_fitpack', sources=['src/_fitpackmodule.c'], libraries=['fitpack'], define_macros=define_macros, depends=(['src/__fitpack.h'] + fitpack_src) ) config.add_extension('dfitpack', sources=['src/fitpack.pyf'], libraries=['fitpack'], define_macros=define_macros, depends=fitpack_src, f2py_options=f2py_options ) if int(os.environ.get('SCIPY_USE_PYTHRAN', 1)): from pythran.dist import PythranExtension ext = PythranExtension( 'scipy.interpolate._rbfinterp_pythran', sources=['scipy/interpolate/_rbfinterp_pythran.py'], config=['compiler.blas=none'] ) config.ext_modules.append(ext) config.add_data_dir('tests') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
from __future__ import annotations from typing import List, Tuple, Callable, Optional import pytest from itertools import product from numpy.testing import assert_allclose, suppress_warnings from scipy import special from scipy.special import cython_special bint_points = [True, False] int_points = [-10, -1, 1, 10] real_points = [-10.0, -1.0, 1.0, 10.0] complex_points = [complex(*tup) for tup in product(real_points, repeat=2)] CYTHON_SIGNATURE_MAP = { 'b': 'bint', 'f': 'float', 'd': 'double', 'g': 'long double', 'F': 'float complex', 'D': 'double complex', 'G': 'long double complex', 'i': 'int', 'l': 'long' } TEST_POINTS = { 'b': bint_points, 'f': real_points, 'd': real_points, 'g': real_points, 'F': complex_points, 'D': complex_points, 'G': complex_points, 'i': int_points, 'l': int_points, } PARAMS: List[Tuple[Callable, Callable, Tuple[str, ...], Optional[str]]] = [ (special.agm, cython_special.agm, ('dd',), None), (special.airy, cython_special._airy_pywrap, ('d', 'D'), None), (special.airye, cython_special._airye_pywrap, ('d', 'D'), None), (special.bdtr, cython_special.bdtr, ('dld', 'ddd'), None), (special.bdtrc, cython_special.bdtrc, ('dld', 'ddd'), None), (special.bdtri, cython_special.bdtri, ('dld', 'ddd'), None), (special.bdtrik, cython_special.bdtrik, ('ddd',), None), (special.bdtrin, cython_special.bdtrin, ('ddd',), None), (special.bei, cython_special.bei, ('d',), None), (special.beip, cython_special.beip, ('d',), None), (special.ber, cython_special.ber, ('d',), None), (special.berp, cython_special.berp, ('d',), None), (special.besselpoly, cython_special.besselpoly, ('ddd',), None), (special.beta, cython_special.beta, ('dd',), None), (special.betainc, cython_special.betainc, ('ddd',), None), (special.betaincinv, cython_special.betaincinv, ('ddd',), None), (special.betaln, cython_special.betaln, ('dd',), None), (special.binom, cython_special.binom, ('dd',), None), (special.boxcox, cython_special.boxcox, ('dd',), None), (special.boxcox1p, cython_special.boxcox1p, ('dd',), None), (special.btdtr, cython_special.btdtr, ('ddd',), None), (special.btdtri, cython_special.btdtri, ('ddd',), None), (special.btdtria, cython_special.btdtria, ('ddd',), None), (special.btdtrib, cython_special.btdtrib, ('ddd',), None), (special.cbrt, cython_special.cbrt, ('d',), None), (special.chdtr, cython_special.chdtr, ('dd',), None), (special.chdtrc, cython_special.chdtrc, ('dd',), None), (special.chdtri, cython_special.chdtri, ('dd',), None), (special.chdtriv, cython_special.chdtriv, ('dd',), None), (special.chndtr, cython_special.chndtr, ('ddd',), None), (special.chndtridf, cython_special.chndtridf, ('ddd',), None), (special.chndtrinc, cython_special.chndtrinc, ('ddd',), None), (special.chndtrix, cython_special.chndtrix, ('ddd',), None), (special.cosdg, cython_special.cosdg, ('d',), None), (special.cosm1, cython_special.cosm1, ('d',), None), (special.cotdg, cython_special.cotdg, ('d',), None), (special.dawsn, cython_special.dawsn, ('d', 'D'), None), (special.ellipe, cython_special.ellipe, ('d',), None), (special.ellipeinc, cython_special.ellipeinc, ('dd',), None), (special.ellipj, cython_special._ellipj_pywrap, ('dd',), None), (special.ellipkinc, cython_special.ellipkinc, ('dd',), None), (special.ellipkm1, cython_special.ellipkm1, ('d',), None), (special.ellipk, cython_special.ellipk, ('d',), None), (special.entr, cython_special.entr, ('d',), None), (special.erf, cython_special.erf, ('d', 'D'), None), (special.erfc, cython_special.erfc, ('d', 'D'), None), (special.erfcx, cython_special.erfcx, ('d', 'D'), 
None), (special.erfi, cython_special.erfi, ('d', 'D'), None), (special.erfinv, cython_special.erfinv, ('d',), None), (special.erfcinv, cython_special.erfcinv, ('d',), None), (special.eval_chebyc, cython_special.eval_chebyc, ('dd', 'dD', 'ld'), None), (special.eval_chebys, cython_special.eval_chebys, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyt, cython_special.eval_chebyt, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyu, cython_special.eval_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_gegenbauer, cython_special.eval_gegenbauer, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_genlaguerre, cython_special.eval_genlaguerre, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_hermite, cython_special.eval_hermite, ('ld',), None), (special.eval_hermitenorm, cython_special.eval_hermitenorm, ('ld',), None), (special.eval_jacobi, cython_special.eval_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_laguerre, cython_special.eval_laguerre, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_legendre, cython_special.eval_legendre, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyt, cython_special.eval_sh_chebyt, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyu, cython_special.eval_sh_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_sh_jacobi, cython_special.eval_sh_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_sh_legendre, cython_special.eval_sh_legendre, ('dd', 'dD', 'ld'), None), (special.exp1, cython_special.exp1, ('d', 'D'), None), (special.exp10, cython_special.exp10, ('d',), None), (special.exp2, cython_special.exp2, ('d',), None), (special.expi, cython_special.expi, ('d', 'D'), None), (special.expit, cython_special.expit, ('f', 'd', 'g'), None), (special.expm1, cython_special.expm1, ('d', 'D'), None), (special.expn, cython_special.expn, ('ld', 'dd'), None), (special.exprel, cython_special.exprel, ('d',), None), (special.fdtr, cython_special.fdtr, ('ddd',), None), (special.fdtrc, cython_special.fdtrc, ('ddd',), None), (special.fdtri, cython_special.fdtri, ('ddd',), None), (special.fdtridfd, cython_special.fdtridfd, ('ddd',), None), (special.fresnel, cython_special._fresnel_pywrap, ('d', 'D'), None), (special.gamma, cython_special.gamma, ('d', 'D'), None), (special.gammainc, cython_special.gammainc, ('dd',), None), (special.gammaincc, cython_special.gammaincc, ('dd',), None), (special.gammainccinv, cython_special.gammainccinv, ('dd',), None), (special.gammaincinv, cython_special.gammaincinv, ('dd',), None), (special.gammaln, cython_special.gammaln, ('d',), None), (special.gammasgn, cython_special.gammasgn, ('d',), None), (special.gdtr, cython_special.gdtr, ('ddd',), None), (special.gdtrc, cython_special.gdtrc, ('ddd',), None), (special.gdtria, cython_special.gdtria, ('ddd',), None), (special.gdtrib, cython_special.gdtrib, ('ddd',), None), (special.gdtrix, cython_special.gdtrix, ('ddd',), None), (special.hankel1, cython_special.hankel1, ('dD',), None), (special.hankel1e, cython_special.hankel1e, ('dD',), None), (special.hankel2, cython_special.hankel2, ('dD',), None), (special.hankel2e, cython_special.hankel2e, ('dD',), None), (special.huber, cython_special.huber, ('dd',), None), (special.hyp0f1, cython_special.hyp0f1, ('dd', 'dD'), None), (special.hyp1f1, cython_special.hyp1f1, ('ddd', 'ddD'), None), (special.hyp2f1, cython_special.hyp2f1, 
('dddd', 'dddD'), None), (special.hyperu, cython_special.hyperu, ('ddd',), None), (special.i0, cython_special.i0, ('d',), None), (special.i0e, cython_special.i0e, ('d',), None), (special.i1, cython_special.i1, ('d',), None), (special.i1e, cython_special.i1e, ('d',), None), (special.inv_boxcox, cython_special.inv_boxcox, ('dd',), None), (special.inv_boxcox1p, cython_special.inv_boxcox1p, ('dd',), None), (special.it2i0k0, cython_special._it2i0k0_pywrap, ('d',), None), (special.it2j0y0, cython_special._it2j0y0_pywrap, ('d',), None), (special.it2struve0, cython_special.it2struve0, ('d',), None), (special.itairy, cython_special._itairy_pywrap, ('d',), None), (special.iti0k0, cython_special._iti0k0_pywrap, ('d',), None), (special.itj0y0, cython_special._itj0y0_pywrap, ('d',), None), (special.itmodstruve0, cython_special.itmodstruve0, ('d',), None), (special.itstruve0, cython_special.itstruve0, ('d',), None), (special.iv, cython_special.iv, ('dd', 'dD'), None), (special.ive, cython_special.ive, ('dd', 'dD'), None), (special.j0, cython_special.j0, ('d',), None), (special.j1, cython_special.j1, ('d',), None), (special.jv, cython_special.jv, ('dd', 'dD'), None), (special.jve, cython_special.jve, ('dd', 'dD'), None), (special.k0, cython_special.k0, ('d',), None), (special.k0e, cython_special.k0e, ('d',), None), (special.k1, cython_special.k1, ('d',), None), (special.k1e, cython_special.k1e, ('d',), None), (special.kei, cython_special.kei, ('d',), None), (special.keip, cython_special.keip, ('d',), None), (special.kelvin, cython_special._kelvin_pywrap, ('d',), None), (special.ker, cython_special.ker, ('d',), None), (special.kerp, cython_special.kerp, ('d',), None), (special.kl_div, cython_special.kl_div, ('dd',), None), (special.kn, cython_special.kn, ('ld', 'dd'), None), (special.kolmogi, cython_special.kolmogi, ('d',), None), (special.kolmogorov, cython_special.kolmogorov, ('d',), None), (special.kv, cython_special.kv, ('dd', 'dD'), None), (special.kve, cython_special.kve, ('dd', 'dD'), None), (special.log1p, cython_special.log1p, ('d', 'D'), None), (special.log_ndtr, cython_special.log_ndtr, ('d', 'D'), None), (special.ndtri_exp, cython_special.ndtri_exp, ('d',), None), (special.loggamma, cython_special.loggamma, ('D',), None), (special.logit, cython_special.logit, ('f', 'd', 'g'), None), (special.lpmv, cython_special.lpmv, ('ddd',), None), (special.mathieu_a, cython_special.mathieu_a, ('dd',), None), (special.mathieu_b, cython_special.mathieu_b, ('dd',), None), (special.mathieu_cem, cython_special._mathieu_cem_pywrap, ('ddd',), None), (special.mathieu_modcem1, cython_special._mathieu_modcem1_pywrap, ('ddd',), None), (special.mathieu_modcem2, cython_special._mathieu_modcem2_pywrap, ('ddd',), None), (special.mathieu_modsem1, cython_special._mathieu_modsem1_pywrap, ('ddd',), None), (special.mathieu_modsem2, cython_special._mathieu_modsem2_pywrap, ('ddd',), None), (special.mathieu_sem, cython_special._mathieu_sem_pywrap, ('ddd',), None), (special.modfresnelm, cython_special._modfresnelm_pywrap, ('d',), None), (special.modfresnelp, cython_special._modfresnelp_pywrap, ('d',), None), (special.modstruve, cython_special.modstruve, ('dd',), None), (special.nbdtr, cython_special.nbdtr, ('lld', 'ddd'), None), (special.nbdtrc, cython_special.nbdtrc, ('lld', 'ddd'), None), (special.nbdtri, cython_special.nbdtri, ('lld', 'ddd'), None), (special.nbdtrik, cython_special.nbdtrik, ('ddd',), None), (special.nbdtrin, cython_special.nbdtrin, ('ddd',), None), (special.ncfdtr, cython_special.ncfdtr, ('dddd',), None), 
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
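The only subtle mechanism exercised above is the fused-type lookup `cyfunc[tuple(signature)]`. A minimal sketch of that same lookup from an interactive session is shown below; it assumes SciPy is installed and simply mirrors what `test_cython_api` does, with `erf` and the `('double complex',)` signature chosen here purely as an illustration.

# Minimal sketch (assumes SciPy is installed): select one specialization of a
# fused-type cython_special function by indexing with a signature tuple, as
# test_cython_api does, and compare it with the corresponding ufunc.
import numpy as np
from scipy import special
from scipy.special import cython_special

spec = cython_special.erf[('double complex',)]   # the 'D' specialization
pt = 1.0 + 1.0j
np.testing.assert_allclose(spec(pt), special.erf(pt))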
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/interpolate/setup.py
import pathlib
from shutil import copyfile
import subprocess
import sys


def isNPY_OLD():
    '''
    A new random C API was added in 1.18 and became stable in 1.19.
    Prefer the new random C API when building with recent numpy.
    '''
    import numpy as np
    ver = tuple(int(num) for num in np.__version__.split('.')[:2])
    return ver < (1, 19)


def make_biasedurn():
    '''Substitute True/False values for NPY_OLD Cython build variable.'''
    biasedurn_base = (pathlib.Path(__file__).parent / 'biasedurn').absolute()
    with open(biasedurn_base.with_suffix('.pyx.templ'), 'r') as src:
        contents = src.read()
    with open(biasedurn_base.with_suffix('.pyx'), 'w') as dest:
        dest.write(contents.format(NPY_OLD=str(bool(isNPY_OLD()))))


def make_boost():
    # Call code generator inside _boost directory
    code_gen = pathlib.Path(__file__).parent / '_boost/include/code_gen.py'
    subprocess.run([sys.executable, str(code_gen)], check=True)


if __name__ == '__main__':
    make_biasedurn()
    make_boost()
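The only non-obvious step in `make_biasedurn` is the `str.format` substitution. The toy sketch below shows its effect on a hypothetical template line; the real contents of `biasedurn.pyx.templ` are not reproduced in this snippet, so the placeholder text is an assumption.

# Toy illustration of the substitution done by make_biasedurn(): the
# .pyx.templ file is plain text containing "{NPY_OLD}" placeholders, which
# str.format fills with "True" or "False".  The template line is hypothetical.
template = "DEF NPY_OLD = {NPY_OLD}\n"
rendered = template.format(NPY_OLD=str(bool(False)))
print(rendered)  # -> "DEF NPY_OLD = False"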
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/stats/_generate_pyx.py
from functools import update_wrapper, lru_cache

from ._pocketfft import helper as _helper


def next_fast_len(target, real=False):
    """Find the next fast size of input data to ``fft``, for zero-padding, etc.

    SciPy's FFT algorithms gain their speed by a recursive divide and conquer
    strategy. This relies on efficient functions for small prime factors of
    the input length. Thus, the transforms are fastest when using composites
    of the prime factors handled by the fft implementation. If there are
    efficient functions for all radices <= `n`, then the result will be a
    number `x` >= ``target`` with only prime factors < `n`. (Also known as
    `n`-smooth numbers)

    Parameters
    ----------
    target : int
        Length to start searching from. Must be a positive integer.
    real : bool, optional
        True if the FFT involves real input or output (e.g., `rfft` or `hfft`
        but not `fft`). Defaults to False.

    Returns
    -------
    out : int
        The smallest fast length greater than or equal to ``target``.

    Notes
    -----
    The result of this function may change in future as performance
    considerations change, for example, if new prime factors are added.

    Calling `fft` or `ifft` with real input data performs an ``'R2C'``
    transform internally.

    Examples
    --------
    On a particular machine, an FFT of prime length takes 11.4 ms:

    >>> from scipy import fft
    >>> rng = np.random.default_rng()
    >>> min_len = 93059  # prime length is worst case for speed
    >>> a = rng.standard_normal(min_len)
    >>> b = fft.fft(a)

    Zero-padding to the next regular length reduces computation time to
    1.6 ms, a speedup of 7.3 times:

    >>> fft.next_fast_len(min_len, real=True)
    93312
    >>> b = fft.fft(a, 93312)

    Rounding up to the next power of 2 is not optimal, taking 3.0 ms to
    compute; 1.9 times longer than the size given by ``next_fast_len``:

    >>> b = fft.fft(a, 131072)

    """
    pass


# Directly wrap the c-function good_size but take the docstring etc., from
# the next_fast_len function above
next_fast_len = update_wrapper(lru_cache()(_helper.good_size), next_fast_len)
next_fast_len.__wrapped__ = _helper.good_size


def _init_nd_shape_and_axes(x, shape, axes):
    """Handle shape and axes arguments for N-D transforms.

    Returns the shape and axes in a standard form, taking into account
    negative values and checking for various potential errors.

    Parameters
    ----------
    x : array_like
        The input array.
    shape : int or array_like of ints or None
        The shape of the result. If both `shape` and `axes` (see below) are
        None, `shape` is ``x.shape``; if `shape` is None but `axes` is not
        None, then `shape` is ``numpy.take(x.shape, axes, axis=0)``.
        If `shape` is -1, the size of the corresponding dimension of `x` is
        used.
    axes : int or array_like of ints or None
        Axes along which the calculation is computed.
        The default is over all axes.
        Negative indices are automatically converted to their positive
        counterparts.

    Returns
    -------
    shape : array
        The shape of the result. It is a 1-D integer array.
    axes : array
        The axes along which the calculation is computed. It is a 1-D
        integer array.

    """
    return _helper._init_nd_shape_and_axes(x, shape, axes)
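The `update_wrapper(lru_cache()(...), ...)` line is the one subtle part of this module: a pure-Python stub exists only to hold the docstring, and its metadata is copied onto a cached implementation. The sketch below reproduces the pattern with a hypothetical `_fast_impl` standing in for `_helper.good_size`, so it runs without the `_pocketfft` extension.

# Self-contained sketch of the wrapping pattern above: the stub carries the
# docstring, update_wrapper copies that metadata onto the cached callable.
from functools import lru_cache, update_wrapper


def _fast_impl(target, real=False):
    return target  # placeholder for the C-accelerated good_size


def next_fast_len_stub(target, real=False):
    """Docstring that should end up on the public function."""


next_fast_len_demo = update_wrapper(lru_cache()(_fast_impl), next_fast_len_stub)
print(next_fast_len_demo.__doc__)   # docstring comes from the stub
print(next_fast_len_demo(1024))     # calls the cached implementation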
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/fft/_helper.py
import operator
from dataclasses import dataclass
import numpy as np
from scipy.special import ndtri
from ._common import ConfidenceInterval


def _validate_int(n, bound, name):
    msg = f'{name} must be an integer not less than {bound}, but got {n!r}'
    try:
        n = operator.index(n)
    except TypeError:
        raise TypeError(msg) from None
    if n < bound:
        raise ValueError(msg)
    return n


@dataclass
class RelativeRiskResult:
    """
    Result of `scipy.stats.contingency.relative_risk`.

    Attributes
    ----------
    relative_risk : float
        This is::

            (exposed_cases/exposed_total) / (control_cases/control_total)

    exposed_cases : int
        The number of "cases" (i.e. occurrence of disease or other event
        of interest) among the sample of "exposed" individuals.
    exposed_total : int
        The total number of "exposed" individuals in the sample.
    control_cases : int
        The number of "cases" among the sample of "control" or non-exposed
        individuals.
    control_total : int
        The total number of "control" individuals in the sample.

    Methods
    -------
    confidence_interval :
        Compute the confidence interval for the relative risk estimate.
    """

    relative_risk: float
    exposed_cases: int
    exposed_total: int
    control_cases: int
    control_total: int

    def confidence_interval(self, confidence_level=0.95):
        """
        Compute the confidence interval for the relative risk.

        The confidence interval is computed using the Katz method
        (i.e. "Method C" of [1]_; see also [2]_, section 3.1.2).

        Parameters
        ----------
        confidence_level : float, optional
            The confidence level to use for the confidence interval.
            Default is 0.95.

        Returns
        -------
        ci : ConfidenceInterval instance
            The return value is an object with attributes ``low`` and
            ``high`` that hold the confidence interval.

        References
        ----------
        .. [1] D. Katz, J. Baptista, S. P. Azen and M. C. Pike, "Obtaining
               confidence intervals for the risk ratio in cohort studies",
               Biometrics, 34, 469-474 (1978).
        .. [2] Hardeo Sahai and Anwer Khurshid, Statistics in Epidemiology,
               CRC Press LLC, Boca Raton, FL, USA (1996).

        Examples
        --------
        >>> from scipy.stats.contingency import relative_risk
        >>> result = relative_risk(exposed_cases=10, exposed_total=75,
        ...                        control_cases=12, control_total=225)
        >>> result.relative_risk
        2.5
        >>> result.confidence_interval()
        ConfidenceInterval(low=1.1261564003469628, high=5.549850800541033)
        """
        if not 0 <= confidence_level <= 1:
            raise ValueError('confidence_level must be in the interval '
                             '[0, 1].')

        # Handle edge cases where either exposed_cases or control_cases
        # is zero.  We follow the convention of the R function riskratio
        # from the epitools library.
        if self.exposed_cases == 0 and self.control_cases == 0:
            # relative risk is nan.
            return ConfidenceInterval(low=np.nan, high=np.nan)
        elif self.exposed_cases == 0:
            # relative risk is 0.
            return ConfidenceInterval(low=0.0, high=np.nan)
        elif self.control_cases == 0:
            # relative risk is inf
            return ConfidenceInterval(low=np.nan, high=np.inf)

        alpha = 1 - confidence_level
        z = ndtri(1 - alpha/2)
        rr = self.relative_risk

        # Estimate of the variance of log(rr) is
        # var(log(rr)) = 1/exposed_cases - 1/exposed_total +
        #                1/control_cases - 1/control_total
        # and the standard error is the square root of that.
        se = np.sqrt(1/self.exposed_cases - 1/self.exposed_total +
                     1/self.control_cases - 1/self.control_total)
        delta = z*se
        katz_lo = rr*np.exp(-delta)
        katz_hi = rr*np.exp(delta)
        return ConfidenceInterval(low=katz_lo, high=katz_hi)


def relative_risk(exposed_cases, exposed_total, control_cases, control_total):
    """
    Compute the relative risk (also known as the risk ratio).

    This function computes the relative risk associated with a 2x2
    contingency table ([1]_, section 2.2.3; [2]_, section 3.1.2). Instead
    of accepting a table as an argument, the individual numbers that are
    used to compute the relative risk are given as separate parameters.
    This is to avoid the ambiguity of which row or column of the
    contingency table corresponds to the "exposed" cases and which
    corresponds to the "control" cases.  Unlike, say, the odds ratio, the
    relative risk is not invariant under an interchange of the rows or
    columns.

    Parameters
    ----------
    exposed_cases : nonnegative int
        The number of "cases" (i.e. occurrence of disease or other event
        of interest) among the sample of "exposed" individuals.
    exposed_total : positive int
        The total number of "exposed" individuals in the sample.
    control_cases : nonnegative int
        The number of "cases" among the sample of "control" or non-exposed
        individuals.
    control_total : positive int
        The total number of "control" individuals in the sample.

    Returns
    -------
    result : instance of `~scipy.stats._result_classes.RelativeRiskResult`
        The object has the float attribute ``relative_risk``, which is::

            rr = (exposed_cases/exposed_total) / (control_cases/control_total)

        The object also has the method ``confidence_interval`` to compute
        the confidence interval of the relative risk for a given confidence
        level.

    Notes
    -----
    The R package epitools has the function `riskratio`, which accepts
    a table with the following layout::

                        disease=0   disease=1
        exposed=0 (ref)    n00         n01
        exposed=1          n10         n11

    With a 2x2 table in the above format, the estimate of the CI is
    computed by `riskratio` when the argument method="wald" is given,
    or with the function `riskratio.wald`.

    For example, in a test of the incidence of lung cancer among a
    sample of smokers and nonsmokers, the "exposed" category would
    correspond to "is a smoker" and the "disease" category would
    correspond to "has or had lung cancer".

    To pass the same data to ``relative_risk``, use::

        relative_risk(n11, n10 + n11, n01, n00 + n01)

    .. versionadded:: 1.7.0

    References
    ----------
    .. [1] Alan Agresti, An Introduction to Categorical Data Analysis
           (second edition), Wiley, Hoboken, NJ, USA (2007).
    .. [2] Hardeo Sahai and Anwer Khurshid, Statistics in Epidemiology,
           CRC Press LLC, Boca Raton, FL, USA (1996).

    Examples
    --------
    >>> from scipy.stats.contingency import relative_risk

    This example is from Example 3.1 of [2]_.  The results of a heart
    disease study are summarized in the following table::

                 High CAT   Low CAT    Total
                 --------   -------    -----
        CHD         27         44        71
        No CHD      95        443       538

        Total      122        487       609

    CHD is coronary heart disease, and CAT refers to the level of
    circulating catecholamine.  CAT is the "exposure" variable, and
    high CAT is the "exposed" category. So the data from the table
    to be passed to ``relative_risk`` is::

        exposed_cases = 27
        exposed_total = 122
        control_cases = 44
        control_total = 487

    >>> result = relative_risk(27, 122, 44, 487)
    >>> result.relative_risk
    2.4495156482861398

    Find the confidence interval for the relative risk.

    >>> result.confidence_interval(confidence_level=0.95)
    ConfidenceInterval(low=1.5836990926700116, high=3.7886786315466354)

    The interval does not contain 1, so the data supports the statement
    that high CAT is associated with greater risk of CHD.
    """
    # Relative risk is a trivial calculation.  The nontrivial part is in
    # the `confidence_interval` method of the RelativeRiskResult class.

    exposed_cases = _validate_int(exposed_cases, 0, "exposed_cases")
    exposed_total = _validate_int(exposed_total, 1, "exposed_total")
    control_cases = _validate_int(control_cases, 0, "control_cases")
    control_total = _validate_int(control_total, 1, "control_total")

    if exposed_cases > exposed_total:
        raise ValueError('exposed_cases must not exceed exposed_total.')
    if control_cases > control_total:
        raise ValueError('control_cases must not exceed control_total.')

    if exposed_cases == 0 and control_cases == 0:
        # relative risk is 0/0.
        rr = np.nan
    elif exposed_cases == 0:
        # relative risk is 0/nonzero
        rr = 0.0
    elif control_cases == 0:
        # relative risk is nonzero/0.
        rr = np.inf
    else:
        p1 = exposed_cases / exposed_total
        p2 = control_cases / control_total
        rr = p1 / p2
    return RelativeRiskResult(relative_risk=rr,
                              exposed_cases=exposed_cases,
                              exposed_total=exposed_total,
                              control_cases=control_cases,
                              control_total=control_total)
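As a sanity check of the Katz interval implemented in `confidence_interval`, the CHD example from the docstring can be recomputed directly from the formulas above. This is a standalone recomputation, not part of the module.

# Recompute the docstring's CHD example from the Katz formulas used in
# RelativeRiskResult.confidence_interval (standalone check, not module code).
import numpy as np
from scipy.special import ndtri

rr = (27 / 122) / (44 / 487)                # ~2.4495
se = np.sqrt(1/27 - 1/122 + 1/44 - 1/487)   # standard error of log(rr)
z = ndtri(1 - 0.05 / 2)                     # ~1.96 for a 95% interval
print(rr, rr * np.exp(-z * se), rr * np.exp(z * se))
# ~ 2.4495, 1.5837, 3.7887 -- matching the docstring values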
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
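The test above relies on the fact that a fused cython_special function can be narrowed to one concrete specialization by indexing it with a tuple of Cython type names: CYTHON_SIGNATURE_MAP supplies the names and cyfunc[tuple(signature)] performs the lookup. A minimal sketch of that pattern outside the test harness, using erf as an arbitrary example:

from scipy import special
from scipy.special import cython_special

z = 1.0 + 1.0j
py_val = special.erf(z)                              # ufunc dispatch on the Python side
cy_val = cython_special.erf[('double complex',)](z)  # explicit 'D' specialization, as in the test
print(py_val, cy_val)                                # the two values should agree

cython_special is primarily meant to be cimported from Cython code; calling it from Python as above is done here, as in the test, only to compare the scalar kernels against the ufuncs.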
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/stats/_relative_risk.py
import os
from os.path import join

from scipy._build_utils import numpy_nodepr_api


def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.system_info import get_info
    from scipy._build_utils import (uses_blas64, blas_ilp64_pre_build_hook,
                                    combine_dict, get_f2py_int64_options)

    config = Configuration('integrate', parent_package, top_path)

    if uses_blas64():
        lapack_opt = get_info('lapack_ilp64_opt', 2)
        pre_build_hook = blas_ilp64_pre_build_hook(lapack_opt)
        f2py_options = get_f2py_int64_options()
    else:
        lapack_opt = get_info('lapack_opt')
        pre_build_hook = None
        f2py_options = None

    mach_src = [join('mach','*.f')]
    quadpack_src = [join('quadpack', '*.f')]
    lsoda_src = [join('odepack', fn) for fn in [
        'blkdta000.f', 'bnorm.f', 'cfode.f', 'ewset.f', 'fnorm.f', 'intdy.f',
        'lsoda.f', 'prja.f', 'solsy.f', 'srcma.f', 'stoda.f', 'vmnorm.f',
        'xerrwv.f', 'xsetf.f', 'xsetun.f']]
    vode_src = [join('odepack', 'vode.f'), join('odepack', 'zvode.f')]
    dop_src = [join('dop','*.f')]
    quadpack_test_src = [join('tests','_test_multivariate.c')]
    odeint_banded_test_src = [join('tests', 'banded5x5.f')]

    config.add_library('mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)},
                       _pre_build_hook=pre_build_hook)
    config.add_library('quadpack', sources=quadpack_src,
                       _pre_build_hook=pre_build_hook)
    config.add_library('lsoda', sources=lsoda_src,
                       _pre_build_hook=pre_build_hook)
    config.add_library('vode', sources=vode_src,
                       _pre_build_hook=pre_build_hook)
    config.add_library('dop', sources=dop_src,
                       _pre_build_hook=pre_build_hook)

    # Extensions
    # quadpack:
    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]
    cfg = combine_dict(lapack_opt,
                       include_dirs=include_dirs,
                       libraries=['quadpack', 'mach'])
    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         depends=(['__quadpack.h'] + quadpack_src + mach_src),
                         **cfg)

    # odepack/lsoda-odeint
    cfg = combine_dict(lapack_opt, numpy_nodepr_api,
                       libraries=['lsoda', 'mach'])
    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         depends=(lsoda_src + mach_src),
                         **cfg)

    # vode
    cfg = combine_dict(lapack_opt, libraries=['vode'])
    ext = config.add_extension('vode',
                               sources=['vode.pyf'],
                               depends=vode_src,
                               f2py_options=f2py_options,
                               **cfg)
    ext._pre_build_hook = pre_build_hook

    # lsoda
    cfg = combine_dict(lapack_opt, libraries=['lsoda', 'mach'])
    ext = config.add_extension('lsoda',
                               sources=['lsoda.pyf'],
                               depends=(lsoda_src + mach_src),
                               f2py_options=f2py_options,
                               **cfg)
    ext._pre_build_hook = pre_build_hook

    # dop
    ext = config.add_extension('_dop',
                               sources=['dop.pyf'],
                               libraries=['dop'],
                               depends=dop_src,
                               f2py_options=f2py_options)
    ext._pre_build_hook = pre_build_hook

    config.add_extension('_test_multivariate', sources=quadpack_test_src)

    # Fortran+f2py extension module for testing odeint.
    cfg = combine_dict(lapack_opt, libraries=['lsoda', 'mach'])
    ext = config.add_extension('_test_odeint_banded',
                               sources=odeint_banded_test_src,
                               depends=(lsoda_src + mach_src),
                               f2py_options=f2py_options,
                               **cfg)
    ext._pre_build_hook = pre_build_hook

    config.add_subpackage('_ivp')
    config.add_data_dir('tests')
    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
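This build script only wires up the Fortran/C backends (QUADPACK, ODEPACK/LSODA, VODE, DOP); they are consumed through the public scipy.integrate API. A quick, illustrative smoke test of that API, not part of the build file:

import numpy as np
from scipy.integrate import quad, odeint

val, err = quad(np.sin, 0.0, np.pi)                            # QUADPACK-backed quadrature, ~2.0
sol = odeint(lambda y, t: -y, 1.0, np.linspace(0.0, 1.0, 5))   # LSODA-backed solve of dy/dt = -y
print(val, sol.ravel())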
from __future__ import annotations from typing import List, Tuple, Callable, Optional import pytest from itertools import product from numpy.testing import assert_allclose, suppress_warnings from scipy import special from scipy.special import cython_special bint_points = [True, False] int_points = [-10, -1, 1, 10] real_points = [-10.0, -1.0, 1.0, 10.0] complex_points = [complex(*tup) for tup in product(real_points, repeat=2)] CYTHON_SIGNATURE_MAP = { 'b': 'bint', 'f': 'float', 'd': 'double', 'g': 'long double', 'F': 'float complex', 'D': 'double complex', 'G': 'long double complex', 'i': 'int', 'l': 'long' } TEST_POINTS = { 'b': bint_points, 'f': real_points, 'd': real_points, 'g': real_points, 'F': complex_points, 'D': complex_points, 'G': complex_points, 'i': int_points, 'l': int_points, } PARAMS: List[Tuple[Callable, Callable, Tuple[str, ...], Optional[str]]] = [ (special.agm, cython_special.agm, ('dd',), None), (special.airy, cython_special._airy_pywrap, ('d', 'D'), None), (special.airye, cython_special._airye_pywrap, ('d', 'D'), None), (special.bdtr, cython_special.bdtr, ('dld', 'ddd'), None), (special.bdtrc, cython_special.bdtrc, ('dld', 'ddd'), None), (special.bdtri, cython_special.bdtri, ('dld', 'ddd'), None), (special.bdtrik, cython_special.bdtrik, ('ddd',), None), (special.bdtrin, cython_special.bdtrin, ('ddd',), None), (special.bei, cython_special.bei, ('d',), None), (special.beip, cython_special.beip, ('d',), None), (special.ber, cython_special.ber, ('d',), None), (special.berp, cython_special.berp, ('d',), None), (special.besselpoly, cython_special.besselpoly, ('ddd',), None), (special.beta, cython_special.beta, ('dd',), None), (special.betainc, cython_special.betainc, ('ddd',), None), (special.betaincinv, cython_special.betaincinv, ('ddd',), None), (special.betaln, cython_special.betaln, ('dd',), None), (special.binom, cython_special.binom, ('dd',), None), (special.boxcox, cython_special.boxcox, ('dd',), None), (special.boxcox1p, cython_special.boxcox1p, ('dd',), None), (special.btdtr, cython_special.btdtr, ('ddd',), None), (special.btdtri, cython_special.btdtri, ('ddd',), None), (special.btdtria, cython_special.btdtria, ('ddd',), None), (special.btdtrib, cython_special.btdtrib, ('ddd',), None), (special.cbrt, cython_special.cbrt, ('d',), None), (special.chdtr, cython_special.chdtr, ('dd',), None), (special.chdtrc, cython_special.chdtrc, ('dd',), None), (special.chdtri, cython_special.chdtri, ('dd',), None), (special.chdtriv, cython_special.chdtriv, ('dd',), None), (special.chndtr, cython_special.chndtr, ('ddd',), None), (special.chndtridf, cython_special.chndtridf, ('ddd',), None), (special.chndtrinc, cython_special.chndtrinc, ('ddd',), None), (special.chndtrix, cython_special.chndtrix, ('ddd',), None), (special.cosdg, cython_special.cosdg, ('d',), None), (special.cosm1, cython_special.cosm1, ('d',), None), (special.cotdg, cython_special.cotdg, ('d',), None), (special.dawsn, cython_special.dawsn, ('d', 'D'), None), (special.ellipe, cython_special.ellipe, ('d',), None), (special.ellipeinc, cython_special.ellipeinc, ('dd',), None), (special.ellipj, cython_special._ellipj_pywrap, ('dd',), None), (special.ellipkinc, cython_special.ellipkinc, ('dd',), None), (special.ellipkm1, cython_special.ellipkm1, ('d',), None), (special.ellipk, cython_special.ellipk, ('d',), None), (special.entr, cython_special.entr, ('d',), None), (special.erf, cython_special.erf, ('d', 'D'), None), (special.erfc, cython_special.erfc, ('d', 'D'), None), (special.erfcx, cython_special.erfcx, ('d', 'D'), 
None), (special.erfi, cython_special.erfi, ('d', 'D'), None), (special.erfinv, cython_special.erfinv, ('d',), None), (special.erfcinv, cython_special.erfcinv, ('d',), None), (special.eval_chebyc, cython_special.eval_chebyc, ('dd', 'dD', 'ld'), None), (special.eval_chebys, cython_special.eval_chebys, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyt, cython_special.eval_chebyt, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyu, cython_special.eval_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_gegenbauer, cython_special.eval_gegenbauer, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_genlaguerre, cython_special.eval_genlaguerre, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_hermite, cython_special.eval_hermite, ('ld',), None), (special.eval_hermitenorm, cython_special.eval_hermitenorm, ('ld',), None), (special.eval_jacobi, cython_special.eval_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_laguerre, cython_special.eval_laguerre, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_legendre, cython_special.eval_legendre, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyt, cython_special.eval_sh_chebyt, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyu, cython_special.eval_sh_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_sh_jacobi, cython_special.eval_sh_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_sh_legendre, cython_special.eval_sh_legendre, ('dd', 'dD', 'ld'), None), (special.exp1, cython_special.exp1, ('d', 'D'), None), (special.exp10, cython_special.exp10, ('d',), None), (special.exp2, cython_special.exp2, ('d',), None), (special.expi, cython_special.expi, ('d', 'D'), None), (special.expit, cython_special.expit, ('f', 'd', 'g'), None), (special.expm1, cython_special.expm1, ('d', 'D'), None), (special.expn, cython_special.expn, ('ld', 'dd'), None), (special.exprel, cython_special.exprel, ('d',), None), (special.fdtr, cython_special.fdtr, ('ddd',), None), (special.fdtrc, cython_special.fdtrc, ('ddd',), None), (special.fdtri, cython_special.fdtri, ('ddd',), None), (special.fdtridfd, cython_special.fdtridfd, ('ddd',), None), (special.fresnel, cython_special._fresnel_pywrap, ('d', 'D'), None), (special.gamma, cython_special.gamma, ('d', 'D'), None), (special.gammainc, cython_special.gammainc, ('dd',), None), (special.gammaincc, cython_special.gammaincc, ('dd',), None), (special.gammainccinv, cython_special.gammainccinv, ('dd',), None), (special.gammaincinv, cython_special.gammaincinv, ('dd',), None), (special.gammaln, cython_special.gammaln, ('d',), None), (special.gammasgn, cython_special.gammasgn, ('d',), None), (special.gdtr, cython_special.gdtr, ('ddd',), None), (special.gdtrc, cython_special.gdtrc, ('ddd',), None), (special.gdtria, cython_special.gdtria, ('ddd',), None), (special.gdtrib, cython_special.gdtrib, ('ddd',), None), (special.gdtrix, cython_special.gdtrix, ('ddd',), None), (special.hankel1, cython_special.hankel1, ('dD',), None), (special.hankel1e, cython_special.hankel1e, ('dD',), None), (special.hankel2, cython_special.hankel2, ('dD',), None), (special.hankel2e, cython_special.hankel2e, ('dD',), None), (special.huber, cython_special.huber, ('dd',), None), (special.hyp0f1, cython_special.hyp0f1, ('dd', 'dD'), None), (special.hyp1f1, cython_special.hyp1f1, ('ddd', 'ddD'), None), (special.hyp2f1, cython_special.hyp2f1, 
('dddd', 'dddD'), None), (special.hyperu, cython_special.hyperu, ('ddd',), None), (special.i0, cython_special.i0, ('d',), None), (special.i0e, cython_special.i0e, ('d',), None), (special.i1, cython_special.i1, ('d',), None), (special.i1e, cython_special.i1e, ('d',), None), (special.inv_boxcox, cython_special.inv_boxcox, ('dd',), None), (special.inv_boxcox1p, cython_special.inv_boxcox1p, ('dd',), None), (special.it2i0k0, cython_special._it2i0k0_pywrap, ('d',), None), (special.it2j0y0, cython_special._it2j0y0_pywrap, ('d',), None), (special.it2struve0, cython_special.it2struve0, ('d',), None), (special.itairy, cython_special._itairy_pywrap, ('d',), None), (special.iti0k0, cython_special._iti0k0_pywrap, ('d',), None), (special.itj0y0, cython_special._itj0y0_pywrap, ('d',), None), (special.itmodstruve0, cython_special.itmodstruve0, ('d',), None), (special.itstruve0, cython_special.itstruve0, ('d',), None), (special.iv, cython_special.iv, ('dd', 'dD'), None), (special.ive, cython_special.ive, ('dd', 'dD'), None), (special.j0, cython_special.j0, ('d',), None), (special.j1, cython_special.j1, ('d',), None), (special.jv, cython_special.jv, ('dd', 'dD'), None), (special.jve, cython_special.jve, ('dd', 'dD'), None), (special.k0, cython_special.k0, ('d',), None), (special.k0e, cython_special.k0e, ('d',), None), (special.k1, cython_special.k1, ('d',), None), (special.k1e, cython_special.k1e, ('d',), None), (special.kei, cython_special.kei, ('d',), None), (special.keip, cython_special.keip, ('d',), None), (special.kelvin, cython_special._kelvin_pywrap, ('d',), None), (special.ker, cython_special.ker, ('d',), None), (special.kerp, cython_special.kerp, ('d',), None), (special.kl_div, cython_special.kl_div, ('dd',), None), (special.kn, cython_special.kn, ('ld', 'dd'), None), (special.kolmogi, cython_special.kolmogi, ('d',), None), (special.kolmogorov, cython_special.kolmogorov, ('d',), None), (special.kv, cython_special.kv, ('dd', 'dD'), None), (special.kve, cython_special.kve, ('dd', 'dD'), None), (special.log1p, cython_special.log1p, ('d', 'D'), None), (special.log_ndtr, cython_special.log_ndtr, ('d', 'D'), None), (special.ndtri_exp, cython_special.ndtri_exp, ('d',), None), (special.loggamma, cython_special.loggamma, ('D',), None), (special.logit, cython_special.logit, ('f', 'd', 'g'), None), (special.lpmv, cython_special.lpmv, ('ddd',), None), (special.mathieu_a, cython_special.mathieu_a, ('dd',), None), (special.mathieu_b, cython_special.mathieu_b, ('dd',), None), (special.mathieu_cem, cython_special._mathieu_cem_pywrap, ('ddd',), None), (special.mathieu_modcem1, cython_special._mathieu_modcem1_pywrap, ('ddd',), None), (special.mathieu_modcem2, cython_special._mathieu_modcem2_pywrap, ('ddd',), None), (special.mathieu_modsem1, cython_special._mathieu_modsem1_pywrap, ('ddd',), None), (special.mathieu_modsem2, cython_special._mathieu_modsem2_pywrap, ('ddd',), None), (special.mathieu_sem, cython_special._mathieu_sem_pywrap, ('ddd',), None), (special.modfresnelm, cython_special._modfresnelm_pywrap, ('d',), None), (special.modfresnelp, cython_special._modfresnelp_pywrap, ('d',), None), (special.modstruve, cython_special.modstruve, ('dd',), None), (special.nbdtr, cython_special.nbdtr, ('lld', 'ddd'), None), (special.nbdtrc, cython_special.nbdtrc, ('lld', 'ddd'), None), (special.nbdtri, cython_special.nbdtri, ('lld', 'ddd'), None), (special.nbdtrik, cython_special.nbdtrik, ('ddd',), None), (special.nbdtrin, cython_special.nbdtrin, ('ddd',), None), (special.ncfdtr, cython_special.ncfdtr, ('dddd',), None), 
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/integrate/setup.py
""" Eigenvalue solver using iterative methods. Find k eigenvectors and eigenvalues of a matrix A using the Arnoldi/Lanczos iterative methods from ARPACK [1]_,[2]_. These methods are most useful for large sparse matrices. - eigs(A,k) - eigsh(A,k) References ---------- .. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/ .. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE: Solution of Large Scale Eigenvalue Problems by Implicitly Restarted Arnoldi Methods. SIAM, Philadelphia, PA, 1998. """ from .arpack import *
from __future__ import annotations from typing import List, Tuple, Callable, Optional import pytest from itertools import product from numpy.testing import assert_allclose, suppress_warnings from scipy import special from scipy.special import cython_special bint_points = [True, False] int_points = [-10, -1, 1, 10] real_points = [-10.0, -1.0, 1.0, 10.0] complex_points = [complex(*tup) for tup in product(real_points, repeat=2)] CYTHON_SIGNATURE_MAP = { 'b': 'bint', 'f': 'float', 'd': 'double', 'g': 'long double', 'F': 'float complex', 'D': 'double complex', 'G': 'long double complex', 'i': 'int', 'l': 'long' } TEST_POINTS = { 'b': bint_points, 'f': real_points, 'd': real_points, 'g': real_points, 'F': complex_points, 'D': complex_points, 'G': complex_points, 'i': int_points, 'l': int_points, } PARAMS: List[Tuple[Callable, Callable, Tuple[str, ...], Optional[str]]] = [ (special.agm, cython_special.agm, ('dd',), None), (special.airy, cython_special._airy_pywrap, ('d', 'D'), None), (special.airye, cython_special._airye_pywrap, ('d', 'D'), None), (special.bdtr, cython_special.bdtr, ('dld', 'ddd'), None), (special.bdtrc, cython_special.bdtrc, ('dld', 'ddd'), None), (special.bdtri, cython_special.bdtri, ('dld', 'ddd'), None), (special.bdtrik, cython_special.bdtrik, ('ddd',), None), (special.bdtrin, cython_special.bdtrin, ('ddd',), None), (special.bei, cython_special.bei, ('d',), None), (special.beip, cython_special.beip, ('d',), None), (special.ber, cython_special.ber, ('d',), None), (special.berp, cython_special.berp, ('d',), None), (special.besselpoly, cython_special.besselpoly, ('ddd',), None), (special.beta, cython_special.beta, ('dd',), None), (special.betainc, cython_special.betainc, ('ddd',), None), (special.betaincinv, cython_special.betaincinv, ('ddd',), None), (special.betaln, cython_special.betaln, ('dd',), None), (special.binom, cython_special.binom, ('dd',), None), (special.boxcox, cython_special.boxcox, ('dd',), None), (special.boxcox1p, cython_special.boxcox1p, ('dd',), None), (special.btdtr, cython_special.btdtr, ('ddd',), None), (special.btdtri, cython_special.btdtri, ('ddd',), None), (special.btdtria, cython_special.btdtria, ('ddd',), None), (special.btdtrib, cython_special.btdtrib, ('ddd',), None), (special.cbrt, cython_special.cbrt, ('d',), None), (special.chdtr, cython_special.chdtr, ('dd',), None), (special.chdtrc, cython_special.chdtrc, ('dd',), None), (special.chdtri, cython_special.chdtri, ('dd',), None), (special.chdtriv, cython_special.chdtriv, ('dd',), None), (special.chndtr, cython_special.chndtr, ('ddd',), None), (special.chndtridf, cython_special.chndtridf, ('ddd',), None), (special.chndtrinc, cython_special.chndtrinc, ('ddd',), None), (special.chndtrix, cython_special.chndtrix, ('ddd',), None), (special.cosdg, cython_special.cosdg, ('d',), None), (special.cosm1, cython_special.cosm1, ('d',), None), (special.cotdg, cython_special.cotdg, ('d',), None), (special.dawsn, cython_special.dawsn, ('d', 'D'), None), (special.ellipe, cython_special.ellipe, ('d',), None), (special.ellipeinc, cython_special.ellipeinc, ('dd',), None), (special.ellipj, cython_special._ellipj_pywrap, ('dd',), None), (special.ellipkinc, cython_special.ellipkinc, ('dd',), None), (special.ellipkm1, cython_special.ellipkm1, ('d',), None), (special.ellipk, cython_special.ellipk, ('d',), None), (special.entr, cython_special.entr, ('d',), None), (special.erf, cython_special.erf, ('d', 'D'), None), (special.erfc, cython_special.erfc, ('d', 'D'), None), (special.erfcx, cython_special.erfcx, ('d', 'D'), 
None), (special.erfi, cython_special.erfi, ('d', 'D'), None), (special.erfinv, cython_special.erfinv, ('d',), None), (special.erfcinv, cython_special.erfcinv, ('d',), None), (special.eval_chebyc, cython_special.eval_chebyc, ('dd', 'dD', 'ld'), None), (special.eval_chebys, cython_special.eval_chebys, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyt, cython_special.eval_chebyt, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyu, cython_special.eval_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_gegenbauer, cython_special.eval_gegenbauer, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_genlaguerre, cython_special.eval_genlaguerre, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_hermite, cython_special.eval_hermite, ('ld',), None), (special.eval_hermitenorm, cython_special.eval_hermitenorm, ('ld',), None), (special.eval_jacobi, cython_special.eval_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_laguerre, cython_special.eval_laguerre, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_legendre, cython_special.eval_legendre, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyt, cython_special.eval_sh_chebyt, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyu, cython_special.eval_sh_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_sh_jacobi, cython_special.eval_sh_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_sh_legendre, cython_special.eval_sh_legendre, ('dd', 'dD', 'ld'), None), (special.exp1, cython_special.exp1, ('d', 'D'), None), (special.exp10, cython_special.exp10, ('d',), None), (special.exp2, cython_special.exp2, ('d',), None), (special.expi, cython_special.expi, ('d', 'D'), None), (special.expit, cython_special.expit, ('f', 'd', 'g'), None), (special.expm1, cython_special.expm1, ('d', 'D'), None), (special.expn, cython_special.expn, ('ld', 'dd'), None), (special.exprel, cython_special.exprel, ('d',), None), (special.fdtr, cython_special.fdtr, ('ddd',), None), (special.fdtrc, cython_special.fdtrc, ('ddd',), None), (special.fdtri, cython_special.fdtri, ('ddd',), None), (special.fdtridfd, cython_special.fdtridfd, ('ddd',), None), (special.fresnel, cython_special._fresnel_pywrap, ('d', 'D'), None), (special.gamma, cython_special.gamma, ('d', 'D'), None), (special.gammainc, cython_special.gammainc, ('dd',), None), (special.gammaincc, cython_special.gammaincc, ('dd',), None), (special.gammainccinv, cython_special.gammainccinv, ('dd',), None), (special.gammaincinv, cython_special.gammaincinv, ('dd',), None), (special.gammaln, cython_special.gammaln, ('d',), None), (special.gammasgn, cython_special.gammasgn, ('d',), None), (special.gdtr, cython_special.gdtr, ('ddd',), None), (special.gdtrc, cython_special.gdtrc, ('ddd',), None), (special.gdtria, cython_special.gdtria, ('ddd',), None), (special.gdtrib, cython_special.gdtrib, ('ddd',), None), (special.gdtrix, cython_special.gdtrix, ('ddd',), None), (special.hankel1, cython_special.hankel1, ('dD',), None), (special.hankel1e, cython_special.hankel1e, ('dD',), None), (special.hankel2, cython_special.hankel2, ('dD',), None), (special.hankel2e, cython_special.hankel2e, ('dD',), None), (special.huber, cython_special.huber, ('dd',), None), (special.hyp0f1, cython_special.hyp0f1, ('dd', 'dD'), None), (special.hyp1f1, cython_special.hyp1f1, ('ddd', 'ddD'), None), (special.hyp2f1, cython_special.hyp2f1, 
('dddd', 'dddD'), None), (special.hyperu, cython_special.hyperu, ('ddd',), None), (special.i0, cython_special.i0, ('d',), None), (special.i0e, cython_special.i0e, ('d',), None), (special.i1, cython_special.i1, ('d',), None), (special.i1e, cython_special.i1e, ('d',), None), (special.inv_boxcox, cython_special.inv_boxcox, ('dd',), None), (special.inv_boxcox1p, cython_special.inv_boxcox1p, ('dd',), None), (special.it2i0k0, cython_special._it2i0k0_pywrap, ('d',), None), (special.it2j0y0, cython_special._it2j0y0_pywrap, ('d',), None), (special.it2struve0, cython_special.it2struve0, ('d',), None), (special.itairy, cython_special._itairy_pywrap, ('d',), None), (special.iti0k0, cython_special._iti0k0_pywrap, ('d',), None), (special.itj0y0, cython_special._itj0y0_pywrap, ('d',), None), (special.itmodstruve0, cython_special.itmodstruve0, ('d',), None), (special.itstruve0, cython_special.itstruve0, ('d',), None), (special.iv, cython_special.iv, ('dd', 'dD'), None), (special.ive, cython_special.ive, ('dd', 'dD'), None), (special.j0, cython_special.j0, ('d',), None), (special.j1, cython_special.j1, ('d',), None), (special.jv, cython_special.jv, ('dd', 'dD'), None), (special.jve, cython_special.jve, ('dd', 'dD'), None), (special.k0, cython_special.k0, ('d',), None), (special.k0e, cython_special.k0e, ('d',), None), (special.k1, cython_special.k1, ('d',), None), (special.k1e, cython_special.k1e, ('d',), None), (special.kei, cython_special.kei, ('d',), None), (special.keip, cython_special.keip, ('d',), None), (special.kelvin, cython_special._kelvin_pywrap, ('d',), None), (special.ker, cython_special.ker, ('d',), None), (special.kerp, cython_special.kerp, ('d',), None), (special.kl_div, cython_special.kl_div, ('dd',), None), (special.kn, cython_special.kn, ('ld', 'dd'), None), (special.kolmogi, cython_special.kolmogi, ('d',), None), (special.kolmogorov, cython_special.kolmogorov, ('d',), None), (special.kv, cython_special.kv, ('dd', 'dD'), None), (special.kve, cython_special.kve, ('dd', 'dD'), None), (special.log1p, cython_special.log1p, ('d', 'D'), None), (special.log_ndtr, cython_special.log_ndtr, ('d', 'D'), None), (special.ndtri_exp, cython_special.ndtri_exp, ('d',), None), (special.loggamma, cython_special.loggamma, ('D',), None), (special.logit, cython_special.logit, ('f', 'd', 'g'), None), (special.lpmv, cython_special.lpmv, ('ddd',), None), (special.mathieu_a, cython_special.mathieu_a, ('dd',), None), (special.mathieu_b, cython_special.mathieu_b, ('dd',), None), (special.mathieu_cem, cython_special._mathieu_cem_pywrap, ('ddd',), None), (special.mathieu_modcem1, cython_special._mathieu_modcem1_pywrap, ('ddd',), None), (special.mathieu_modcem2, cython_special._mathieu_modcem2_pywrap, ('ddd',), None), (special.mathieu_modsem1, cython_special._mathieu_modsem1_pywrap, ('ddd',), None), (special.mathieu_modsem2, cython_special._mathieu_modsem2_pywrap, ('ddd',), None), (special.mathieu_sem, cython_special._mathieu_sem_pywrap, ('ddd',), None), (special.modfresnelm, cython_special._modfresnelm_pywrap, ('d',), None), (special.modfresnelp, cython_special._modfresnelp_pywrap, ('d',), None), (special.modstruve, cython_special.modstruve, ('dd',), None), (special.nbdtr, cython_special.nbdtr, ('lld', 'ddd'), None), (special.nbdtrc, cython_special.nbdtrc, ('lld', 'ddd'), None), (special.nbdtri, cython_special.nbdtri, ('lld', 'ddd'), None), (special.nbdtrik, cython_special.nbdtrik, ('ddd',), None), (special.nbdtrin, cython_special.nbdtrin, ('ddd',), None), (special.ncfdtr, cython_special.ncfdtr, ('dddd',), None), 
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/sparse/linalg/eigen/arpack/__init__.py
""" Sparse linear algebra (:mod:`scipy.sparse.linalg`) ================================================== .. currentmodule:: scipy.sparse.linalg Abstract linear operators ------------------------- .. autosummary:: :toctree: generated/ LinearOperator -- abstract representation of a linear operator aslinearoperator -- convert an object to an abstract linear operator Matrix Operations ----------------- .. autosummary:: :toctree: generated/ inv -- compute the sparse matrix inverse expm -- compute the sparse matrix exponential expm_multiply -- compute the product of a matrix exponential and a matrix Matrix norms ------------ .. autosummary:: :toctree: generated/ norm -- Norm of a sparse matrix onenormest -- Estimate the 1-norm of a sparse matrix Solving linear problems ----------------------- Direct methods for linear equation systems: .. autosummary:: :toctree: generated/ spsolve -- Solve the sparse linear system Ax=b spsolve_triangular -- Solve the sparse linear system Ax=b for a triangular matrix factorized -- Pre-factorize matrix to a function solving a linear system MatrixRankWarning -- Warning on exactly singular matrices use_solver -- Select direct solver to use Iterative methods for linear equation systems: .. autosummary:: :toctree: generated/ bicg -- Use BIConjugate Gradient iteration to solve A x = b bicgstab -- Use BIConjugate Gradient STABilized iteration to solve A x = b cg -- Use Conjugate Gradient iteration to solve A x = b cgs -- Use Conjugate Gradient Squared iteration to solve A x = b gmres -- Use Generalized Minimal RESidual iteration to solve A x = b lgmres -- Solve a matrix equation using the LGMRES algorithm minres -- Use MINimum RESidual iteration to solve Ax = b qmr -- Use Quasi-Minimal Residual iteration to solve A x = b gcrotmk -- Solve a matrix equation using the GCROT(m,k) algorithm Iterative methods for least-squares problems: .. autosummary:: :toctree: generated/ lsqr -- Find the least-squares solution to a sparse linear equation system lsmr -- Find the least-squares solution to a sparse linear equation system Matrix factorizations --------------------- Eigenvalue problems: .. autosummary:: :toctree: generated/ eigs -- Find k eigenvalues and eigenvectors of the square matrix A eigsh -- Find k eigenvalues and eigenvectors of a symmetric matrix lobpcg -- Solve symmetric partial eigenproblems with optional preconditioning Singular values problems: .. autosummary:: :toctree: generated/ svds -- Compute k singular values/vectors for a sparse matrix The `svds` function supports the following solvers: .. toctree:: sparse.linalg.svds-arpack sparse.linalg.svds-lobpcg Complete or incomplete LU factorizations .. autosummary:: :toctree: generated/ splu -- Compute a LU decomposition for a sparse matrix spilu -- Compute an incomplete LU decomposition for a sparse matrix SuperLU -- Object representing an LU factorization Exceptions ---------- .. autosummary:: :toctree: generated/ ArpackNoConvergence ArpackError """ from .isolve import * from .dsolve import * from .interface import * from .eigen import * from .matfuncs import * from ._onenormest import * from ._norm import * from ._expm_multiply import * __all__ = [s for s in dir() if not s.startswith('_')] from scipy._lib._testutils import PytestTester test = PytestTester(__name__) del PytestTester
from __future__ import annotations from typing import List, Tuple, Callable, Optional import pytest from itertools import product from numpy.testing import assert_allclose, suppress_warnings from scipy import special from scipy.special import cython_special bint_points = [True, False] int_points = [-10, -1, 1, 10] real_points = [-10.0, -1.0, 1.0, 10.0] complex_points = [complex(*tup) for tup in product(real_points, repeat=2)] CYTHON_SIGNATURE_MAP = { 'b': 'bint', 'f': 'float', 'd': 'double', 'g': 'long double', 'F': 'float complex', 'D': 'double complex', 'G': 'long double complex', 'i': 'int', 'l': 'long' } TEST_POINTS = { 'b': bint_points, 'f': real_points, 'd': real_points, 'g': real_points, 'F': complex_points, 'D': complex_points, 'G': complex_points, 'i': int_points, 'l': int_points, } PARAMS: List[Tuple[Callable, Callable, Tuple[str, ...], Optional[str]]] = [ (special.agm, cython_special.agm, ('dd',), None), (special.airy, cython_special._airy_pywrap, ('d', 'D'), None), (special.airye, cython_special._airye_pywrap, ('d', 'D'), None), (special.bdtr, cython_special.bdtr, ('dld', 'ddd'), None), (special.bdtrc, cython_special.bdtrc, ('dld', 'ddd'), None), (special.bdtri, cython_special.bdtri, ('dld', 'ddd'), None), (special.bdtrik, cython_special.bdtrik, ('ddd',), None), (special.bdtrin, cython_special.bdtrin, ('ddd',), None), (special.bei, cython_special.bei, ('d',), None), (special.beip, cython_special.beip, ('d',), None), (special.ber, cython_special.ber, ('d',), None), (special.berp, cython_special.berp, ('d',), None), (special.besselpoly, cython_special.besselpoly, ('ddd',), None), (special.beta, cython_special.beta, ('dd',), None), (special.betainc, cython_special.betainc, ('ddd',), None), (special.betaincinv, cython_special.betaincinv, ('ddd',), None), (special.betaln, cython_special.betaln, ('dd',), None), (special.binom, cython_special.binom, ('dd',), None), (special.boxcox, cython_special.boxcox, ('dd',), None), (special.boxcox1p, cython_special.boxcox1p, ('dd',), None), (special.btdtr, cython_special.btdtr, ('ddd',), None), (special.btdtri, cython_special.btdtri, ('ddd',), None), (special.btdtria, cython_special.btdtria, ('ddd',), None), (special.btdtrib, cython_special.btdtrib, ('ddd',), None), (special.cbrt, cython_special.cbrt, ('d',), None), (special.chdtr, cython_special.chdtr, ('dd',), None), (special.chdtrc, cython_special.chdtrc, ('dd',), None), (special.chdtri, cython_special.chdtri, ('dd',), None), (special.chdtriv, cython_special.chdtriv, ('dd',), None), (special.chndtr, cython_special.chndtr, ('ddd',), None), (special.chndtridf, cython_special.chndtridf, ('ddd',), None), (special.chndtrinc, cython_special.chndtrinc, ('ddd',), None), (special.chndtrix, cython_special.chndtrix, ('ddd',), None), (special.cosdg, cython_special.cosdg, ('d',), None), (special.cosm1, cython_special.cosm1, ('d',), None), (special.cotdg, cython_special.cotdg, ('d',), None), (special.dawsn, cython_special.dawsn, ('d', 'D'), None), (special.ellipe, cython_special.ellipe, ('d',), None), (special.ellipeinc, cython_special.ellipeinc, ('dd',), None), (special.ellipj, cython_special._ellipj_pywrap, ('dd',), None), (special.ellipkinc, cython_special.ellipkinc, ('dd',), None), (special.ellipkm1, cython_special.ellipkm1, ('d',), None), (special.ellipk, cython_special.ellipk, ('d',), None), (special.entr, cython_special.entr, ('d',), None), (special.erf, cython_special.erf, ('d', 'D'), None), (special.erfc, cython_special.erfc, ('d', 'D'), None), (special.erfcx, cython_special.erfcx, ('d', 'D'), 
None), (special.erfi, cython_special.erfi, ('d', 'D'), None), (special.erfinv, cython_special.erfinv, ('d',), None), (special.erfcinv, cython_special.erfcinv, ('d',), None), (special.eval_chebyc, cython_special.eval_chebyc, ('dd', 'dD', 'ld'), None), (special.eval_chebys, cython_special.eval_chebys, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyt, cython_special.eval_chebyt, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_chebyu, cython_special.eval_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_gegenbauer, cython_special.eval_gegenbauer, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_genlaguerre, cython_special.eval_genlaguerre, ('ddd', 'ddD', 'ldd'), 'd and l differ for negative int'), (special.eval_hermite, cython_special.eval_hermite, ('ld',), None), (special.eval_hermitenorm, cython_special.eval_hermitenorm, ('ld',), None), (special.eval_jacobi, cython_special.eval_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_laguerre, cython_special.eval_laguerre, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_legendre, cython_special.eval_legendre, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyt, cython_special.eval_sh_chebyt, ('dd', 'dD', 'ld'), None), (special.eval_sh_chebyu, cython_special.eval_sh_chebyu, ('dd', 'dD', 'ld'), 'd and l differ for negative int'), (special.eval_sh_jacobi, cython_special.eval_sh_jacobi, ('dddd', 'dddD', 'lddd'), 'd and l differ for negative int'), (special.eval_sh_legendre, cython_special.eval_sh_legendre, ('dd', 'dD', 'ld'), None), (special.exp1, cython_special.exp1, ('d', 'D'), None), (special.exp10, cython_special.exp10, ('d',), None), (special.exp2, cython_special.exp2, ('d',), None), (special.expi, cython_special.expi, ('d', 'D'), None), (special.expit, cython_special.expit, ('f', 'd', 'g'), None), (special.expm1, cython_special.expm1, ('d', 'D'), None), (special.expn, cython_special.expn, ('ld', 'dd'), None), (special.exprel, cython_special.exprel, ('d',), None), (special.fdtr, cython_special.fdtr, ('ddd',), None), (special.fdtrc, cython_special.fdtrc, ('ddd',), None), (special.fdtri, cython_special.fdtri, ('ddd',), None), (special.fdtridfd, cython_special.fdtridfd, ('ddd',), None), (special.fresnel, cython_special._fresnel_pywrap, ('d', 'D'), None), (special.gamma, cython_special.gamma, ('d', 'D'), None), (special.gammainc, cython_special.gammainc, ('dd',), None), (special.gammaincc, cython_special.gammaincc, ('dd',), None), (special.gammainccinv, cython_special.gammainccinv, ('dd',), None), (special.gammaincinv, cython_special.gammaincinv, ('dd',), None), (special.gammaln, cython_special.gammaln, ('d',), None), (special.gammasgn, cython_special.gammasgn, ('d',), None), (special.gdtr, cython_special.gdtr, ('ddd',), None), (special.gdtrc, cython_special.gdtrc, ('ddd',), None), (special.gdtria, cython_special.gdtria, ('ddd',), None), (special.gdtrib, cython_special.gdtrib, ('ddd',), None), (special.gdtrix, cython_special.gdtrix, ('ddd',), None), (special.hankel1, cython_special.hankel1, ('dD',), None), (special.hankel1e, cython_special.hankel1e, ('dD',), None), (special.hankel2, cython_special.hankel2, ('dD',), None), (special.hankel2e, cython_special.hankel2e, ('dD',), None), (special.huber, cython_special.huber, ('dd',), None), (special.hyp0f1, cython_special.hyp0f1, ('dd', 'dD'), None), (special.hyp1f1, cython_special.hyp1f1, ('ddd', 'ddD'), None), (special.hyp2f1, cython_special.hyp2f1, 
('dddd', 'dddD'), None), (special.hyperu, cython_special.hyperu, ('ddd',), None), (special.i0, cython_special.i0, ('d',), None), (special.i0e, cython_special.i0e, ('d',), None), (special.i1, cython_special.i1, ('d',), None), (special.i1e, cython_special.i1e, ('d',), None), (special.inv_boxcox, cython_special.inv_boxcox, ('dd',), None), (special.inv_boxcox1p, cython_special.inv_boxcox1p, ('dd',), None), (special.it2i0k0, cython_special._it2i0k0_pywrap, ('d',), None), (special.it2j0y0, cython_special._it2j0y0_pywrap, ('d',), None), (special.it2struve0, cython_special.it2struve0, ('d',), None), (special.itairy, cython_special._itairy_pywrap, ('d',), None), (special.iti0k0, cython_special._iti0k0_pywrap, ('d',), None), (special.itj0y0, cython_special._itj0y0_pywrap, ('d',), None), (special.itmodstruve0, cython_special.itmodstruve0, ('d',), None), (special.itstruve0, cython_special.itstruve0, ('d',), None), (special.iv, cython_special.iv, ('dd', 'dD'), None), (special.ive, cython_special.ive, ('dd', 'dD'), None), (special.j0, cython_special.j0, ('d',), None), (special.j1, cython_special.j1, ('d',), None), (special.jv, cython_special.jv, ('dd', 'dD'), None), (special.jve, cython_special.jve, ('dd', 'dD'), None), (special.k0, cython_special.k0, ('d',), None), (special.k0e, cython_special.k0e, ('d',), None), (special.k1, cython_special.k1, ('d',), None), (special.k1e, cython_special.k1e, ('d',), None), (special.kei, cython_special.kei, ('d',), None), (special.keip, cython_special.keip, ('d',), None), (special.kelvin, cython_special._kelvin_pywrap, ('d',), None), (special.ker, cython_special.ker, ('d',), None), (special.kerp, cython_special.kerp, ('d',), None), (special.kl_div, cython_special.kl_div, ('dd',), None), (special.kn, cython_special.kn, ('ld', 'dd'), None), (special.kolmogi, cython_special.kolmogi, ('d',), None), (special.kolmogorov, cython_special.kolmogorov, ('d',), None), (special.kv, cython_special.kv, ('dd', 'dD'), None), (special.kve, cython_special.kve, ('dd', 'dD'), None), (special.log1p, cython_special.log1p, ('d', 'D'), None), (special.log_ndtr, cython_special.log_ndtr, ('d', 'D'), None), (special.ndtri_exp, cython_special.ndtri_exp, ('d',), None), (special.loggamma, cython_special.loggamma, ('D',), None), (special.logit, cython_special.logit, ('f', 'd', 'g'), None), (special.lpmv, cython_special.lpmv, ('ddd',), None), (special.mathieu_a, cython_special.mathieu_a, ('dd',), None), (special.mathieu_b, cython_special.mathieu_b, ('dd',), None), (special.mathieu_cem, cython_special._mathieu_cem_pywrap, ('ddd',), None), (special.mathieu_modcem1, cython_special._mathieu_modcem1_pywrap, ('ddd',), None), (special.mathieu_modcem2, cython_special._mathieu_modcem2_pywrap, ('ddd',), None), (special.mathieu_modsem1, cython_special._mathieu_modsem1_pywrap, ('ddd',), None), (special.mathieu_modsem2, cython_special._mathieu_modsem2_pywrap, ('ddd',), None), (special.mathieu_sem, cython_special._mathieu_sem_pywrap, ('ddd',), None), (special.modfresnelm, cython_special._modfresnelm_pywrap, ('d',), None), (special.modfresnelp, cython_special._modfresnelp_pywrap, ('d',), None), (special.modstruve, cython_special.modstruve, ('dd',), None), (special.nbdtr, cython_special.nbdtr, ('lld', 'ddd'), None), (special.nbdtrc, cython_special.nbdtrc, ('lld', 'ddd'), None), (special.nbdtri, cython_special.nbdtri, ('lld', 'ddd'), None), (special.nbdtrik, cython_special.nbdtrik, ('ddd',), None), (special.nbdtrin, cython_special.nbdtrin, ('ddd',), None), (special.ncfdtr, cython_special.ncfdtr, ('dddd',), None), 
(special.ncfdtri, cython_special.ncfdtri, ('dddd',), None), (special.ncfdtridfd, cython_special.ncfdtridfd, ('dddd',), None), (special.ncfdtridfn, cython_special.ncfdtridfn, ('dddd',), None), (special.ncfdtrinc, cython_special.ncfdtrinc, ('dddd',), None), (special.nctdtr, cython_special.nctdtr, ('ddd',), None), (special.nctdtridf, cython_special.nctdtridf, ('ddd',), None), (special.nctdtrinc, cython_special.nctdtrinc, ('ddd',), None), (special.nctdtrit, cython_special.nctdtrit, ('ddd',), None), (special.ndtr, cython_special.ndtr, ('d', 'D'), None), (special.ndtri, cython_special.ndtri, ('d',), None), (special.nrdtrimn, cython_special.nrdtrimn, ('ddd',), None), (special.nrdtrisd, cython_special.nrdtrisd, ('ddd',), None), (special.obl_ang1, cython_special._obl_ang1_pywrap, ('dddd',), None), (special.obl_ang1_cv, cython_special._obl_ang1_cv_pywrap, ('ddddd',), None), (special.obl_cv, cython_special.obl_cv, ('ddd',), None), (special.obl_rad1, cython_special._obl_rad1_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad1_cv, cython_special._obl_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.obl_rad2, cython_special._obl_rad2_pywrap, ('dddd',), "see gh-6211"), (special.obl_rad2_cv, cython_special._obl_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pbdv, cython_special._pbdv_pywrap, ('dd',), None), (special.pbvv, cython_special._pbvv_pywrap, ('dd',), None), (special.pbwa, cython_special._pbwa_pywrap, ('dd',), None), (special.pdtr, cython_special.pdtr, ('dd', 'dd'), None), (special.pdtrc, cython_special.pdtrc, ('dd', 'dd'), None), (special.pdtri, cython_special.pdtri, ('ld', 'dd'), None), (special.pdtrik, cython_special.pdtrik, ('dd',), None), (special.poch, cython_special.poch, ('dd',), None), (special.pro_ang1, cython_special._pro_ang1_pywrap, ('dddd',), None), (special.pro_ang1_cv, cython_special._pro_ang1_cv_pywrap, ('ddddd',), None), (special.pro_cv, cython_special.pro_cv, ('ddd',), None), (special.pro_rad1, cython_special._pro_rad1_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad1_cv, cython_special._pro_rad1_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pro_rad2, cython_special._pro_rad2_pywrap, ('dddd',), "see gh-6211"), (special.pro_rad2_cv, cython_special._pro_rad2_cv_pywrap, ('ddddd',), "see gh-6211"), (special.pseudo_huber, cython_special.pseudo_huber, ('dd',), None), (special.psi, cython_special.psi, ('d', 'D'), None), (special.radian, cython_special.radian, ('ddd',), None), (special.rel_entr, cython_special.rel_entr, ('dd',), None), (special.rgamma, cython_special.rgamma, ('d', 'D'), None), (special.round, cython_special.round, ('d',), None), (special.spherical_jn, cython_special.spherical_jn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_yn, cython_special.spherical_yn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_in, cython_special.spherical_in, ('ld', 'ldb', 'lD', 'lDb'), None), (special.spherical_kn, cython_special.spherical_kn, ('ld', 'ldb', 'lD', 'lDb'), None), (special.shichi, cython_special._shichi_pywrap, ('d', 'D'), None), (special.sici, cython_special._sici_pywrap, ('d', 'D'), None), (special.sindg, cython_special.sindg, ('d',), None), (special.smirnov, cython_special.smirnov, ('ld', 'dd'), None), (special.smirnovi, cython_special.smirnovi, ('ld', 'dd'), None), (special.spence, cython_special.spence, ('d', 'D'), None), (special.sph_harm, cython_special.sph_harm, ('lldd', 'dddd'), None), (special.stdtr, cython_special.stdtr, ('dd',), None), (special.stdtridf, cython_special.stdtridf, ('dd',), None), (special.stdtrit, cython_special.stdtrit, 
('dd',), None), (special.struve, cython_special.struve, ('dd',), None), (special.tandg, cython_special.tandg, ('d',), None), (special.tklmbda, cython_special.tklmbda, ('dd',), None), (special.voigt_profile, cython_special.voigt_profile, ('ddd',), None), (special.wofz, cython_special.wofz, ('D',), None), (special.wright_bessel, cython_special.wright_bessel, ('ddd',), None), (special.wrightomega, cython_special.wrightomega, ('D',), None), (special.xlog1py, cython_special.xlog1py, ('dd', 'DD'), None), (special.xlogy, cython_special.xlogy, ('dd', 'DD'), None), (special.y0, cython_special.y0, ('d',), None), (special.y1, cython_special.y1, ('d',), None), (special.yn, cython_special.yn, ('ld', 'dd'), None), (special.yv, cython_special.yv, ('dd', 'dD'), None), (special.yve, cython_special.yve, ('dd', 'dD'), None), (special.zetac, cython_special.zetac, ('d',), None), (special.owens_t, cython_special.owens_t, ('dd',), None) ] IDS = [x[0].__name__ for x in PARAMS] def _generate_test_points(typecodes): axes = tuple(TEST_POINTS[x] for x in typecodes) pts = list(product(*axes)) return pts def test_cython_api_completeness(): # Check that everything is tested for name in dir(cython_special): func = getattr(cython_special, name) if callable(func) and not name.startswith('_'): for _, cyfun, _, _ in PARAMS: if cyfun is func: break else: raise RuntimeError(f"{name} missing from tests!") @pytest.mark.parametrize("param", PARAMS, ids=IDS) def test_cython_api(param): pyfunc, cyfunc, specializations, knownfailure = param if knownfailure: pytest.xfail(reason=knownfailure) # Check which parameters are expected to be fused types max_params = max(len(spec) for spec in specializations) values = [set() for _ in range(max_params)] for typecodes in specializations: for j, v in enumerate(typecodes): values[j].add(v) seen = set() is_fused_code = [False] * len(values) for j, v in enumerate(values): vv = tuple(sorted(v)) if vv in seen: continue is_fused_code[j] = (len(v) > 1) seen.add(vv) # Check results for typecodes in specializations: # Pick the correct specialized function signature = [CYTHON_SIGNATURE_MAP[code] for j, code in enumerate(typecodes) if is_fused_code[j]] if signature: cy_spec_func = cyfunc[tuple(signature)] else: signature = None cy_spec_func = cyfunc # Test it pts = _generate_test_points(typecodes) for pt in pts: with suppress_warnings() as sup: sup.filter(DeprecationWarning) pyval = pyfunc(*pt) cyval = cy_spec_func(*pt) assert_allclose(cyval, pyval, err_msg="{} {} {}".format(pt, typecodes, signature))
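The test above resolves Cython fused-type specializations by indexing the function with a tuple of type names from CYTHON_SIGNATURE_MAP, then calls the resulting specialization directly from Python. A small sketch of that same pattern outside the test harness, assuming a SciPy build that ships scipy.special.cython_special; the sample inputs are illustrative:

import scipy.special as special
from scipy.special import cython_special

# Non-fused cpdef functions are callable directly and should agree with
# the Python-level ufuncs.
assert abs(cython_special.agm(1.0, 2.0) - special.agm(1.0, 2.0)) < 1e-12

# erf is declared for real and complex arguments ('d' and 'D' above), so a
# concrete specialization can be selected the same way the test does.
erf_complex = cython_special.erf[('double complex',)]
print(erf_complex(1.0 + 0.5j), special.erf(1.0 + 0.5j))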
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/sparse/linalg/__init__.py
import os

from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration
from numpy import get_include
from scipy._build_utils import numpy_nodepr_api


def configuration(parent_package='', top_path=None):
    config = Configuration('ndimage', parent_package, top_path)

    include_dirs = ['src',
                    get_include(),
                    os.path.join(os.path.dirname(__file__),
                                 '..', '_lib', 'src')]

    config.add_extension("_nd_image",
                         sources=["src/nd_image.c",
                                  "src/ni_filters.c",
                                  "src/ni_fourier.c",
                                  "src/ni_interpolation.c",
                                  "src/ni_measure.c",
                                  "src/ni_morphology.c",
                                  "src/ni_splines.c",
                                  "src/ni_support.c"],
                         include_dirs=include_dirs,
                         **numpy_nodepr_api)

    # Cython wants the .c and .pyx to have the underscore.
    config.add_extension("_ni_label",
                         sources=["src/_ni_label.c"],
                         include_dirs=['src'] + [get_include()])

    config.add_extension("_ctest",
                         sources=["src/_ctest.c"],
                         include_dirs=[get_include()],
                         **numpy_nodepr_api)

    config.add_extension("_cytest",
                         sources=["src/_cytest.c"])

    config.add_data_dir('tests')

    return config


if __name__ == '__main__':
    setup(**configuration(top_path='').todict())
endolith/scipy
scipy/special/tests/test_cython_special.py
scipy/ndimage/setup.py
from warnings import catch_warnings

import numpy as np

from pandas.core.dtypes import generic as gt

import pandas as pd
import pandas._testing as tm


class TestABCClasses:
    tuples = [[1, 2, 2], ["red", "blue", "red"]]
    multi_index = pd.MultiIndex.from_arrays(tuples, names=("number", "color"))
    datetime_index = pd.to_datetime(["2000/1/1", "2010/1/1"])
    timedelta_index = pd.to_timedelta(np.arange(5), unit="s")
    period_index = pd.period_range("2000/1/1", "2010/1/1/", freq="M")
    categorical = pd.Categorical([1, 2, 3], categories=[2, 3, 1])
    categorical_df = pd.DataFrame({"values": [1, 2, 3]}, index=categorical)
    df = pd.DataFrame({"names": ["a", "b", "c"]}, index=multi_index)
    sparse_array = pd.arrays.SparseArray(np.random.randn(10))
    datetime_array = pd.core.arrays.DatetimeArray(datetime_index)
    timedelta_array = pd.core.arrays.TimedeltaArray(timedelta_index)

    def test_abc_types(self):
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndex)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCInt64Index)
        assert isinstance(pd.UInt64Index([1, 2, 3]), gt.ABCUInt64Index)
        assert isinstance(pd.Float64Index([1, 2, 3]), gt.ABCFloat64Index)
        assert isinstance(self.multi_index, gt.ABCMultiIndex)
        assert isinstance(self.datetime_index, gt.ABCDatetimeIndex)
        assert isinstance(self.timedelta_index, gt.ABCTimedeltaIndex)
        assert isinstance(self.period_index, gt.ABCPeriodIndex)
        assert isinstance(self.categorical_df.index, gt.ABCCategoricalIndex)
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndexClass)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCIndexClass)
        assert isinstance(pd.Series([1, 2, 3]), gt.ABCSeries)
        assert isinstance(self.df, gt.ABCDataFrame)
        assert isinstance(self.sparse_array, gt.ABCExtensionArray)
        assert isinstance(self.categorical, gt.ABCCategorical)
        assert isinstance(self.datetime_array, gt.ABCDatetimeArray)
        assert not isinstance(self.datetime_index, gt.ABCDatetimeArray)
        assert isinstance(self.timedelta_array, gt.ABCTimedeltaArray)
        assert not isinstance(self.timedelta_index, gt.ABCTimedeltaArray)


def test_setattr_warnings():
    # GH7175 - GOTCHA: You can't use dot notation to add a column...
    d = {
        "one": pd.Series([1.0, 2.0, 3.0], index=["a", "b", "c"]),
        "two": pd.Series([1.0, 2.0, 3.0, 4.0], index=["a", "b", "c", "d"]),
    }
    df = pd.DataFrame(d)

    with catch_warnings(record=True) as w:
        # successfully add new column
        # this should not raise a warning
        df["three"] = df.two + 1
        assert len(w) == 0
        assert df.three.sum() > df.two.sum()

    with catch_warnings(record=True) as w:
        # successfully modify column in place
        # this should not raise a warning
        df.one += 1
        assert len(w) == 0
        assert df.one.iloc[0] == 2

    with catch_warnings(record=True) as w:
        # successfully add an attribute to a series
        # this should not raise a warning
        df.two.not_an_index = [1, 2]
        assert len(w) == 0

    with tm.assert_produces_warning(UserWarning):
        # warn when setting column to nonexistent name
        df.four = df.two + 2
        assert df.four.sum() > df.two.sum()
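test_setattr_warnings above documents the GH7175 gotcha: attribute assignment does not create a DataFrame column. A short standalone illustration of that behaviour, assuming a pandas version that emits the same UserWarning the test expects; the frame contents are illustrative:

import warnings

import pandas as pd

df = pd.DataFrame({"two": [1.0, 2.0, 3.0]})
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    df.four = df.two + 2                 # sets an attribute, not a column
print("four" in df.columns)              # False; use df["four"] = ... instead
print(any(issubclass(w.category, UserWarning) for w in caught))   # True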
""" test the scalar Timestamp """ import calendar from datetime import datetime, timedelta import locale import unicodedata from dateutil.tz import tzutc import numpy as np import pytest import pytz from pytz import timezone, utc from pandas._libs.tslibs.timezones import dateutil_gettz as gettz, get_timezone from pandas.compat.numpy import np_datetime64_compat import pandas.util._test_decorators as td from pandas import NaT, Timedelta, Timestamp import pandas._testing as tm from pandas.tseries import offsets class TestTimestampProperties: def test_properties_business(self): ts = Timestamp("2017-10-01", freq="B") control = Timestamp("2017-10-01") assert ts.dayofweek == 6 assert not ts.is_month_start # not a weekday assert not ts.is_quarter_start # not a weekday # Control case: non-business is month/qtr start assert control.is_month_start assert control.is_quarter_start ts = Timestamp("2017-09-30", freq="B") control = Timestamp("2017-09-30") assert ts.dayofweek == 5 assert not ts.is_month_end # not a weekday assert not ts.is_quarter_end # not a weekday # Control case: non-business is month/qtr start assert control.is_month_end assert control.is_quarter_end def test_fields(self): def check(value, equal): # that we are int like assert isinstance(value, int) assert value == equal # GH 10050 ts = Timestamp("2015-05-10 09:06:03.000100001") check(ts.year, 2015) check(ts.month, 5) check(ts.day, 10) check(ts.hour, 9) check(ts.minute, 6) check(ts.second, 3) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 100) check(ts.nanosecond, 1) check(ts.dayofweek, 6) check(ts.quarter, 2) check(ts.dayofyear, 130) check(ts.week, 19) check(ts.daysinmonth, 31) check(ts.daysinmonth, 31) # GH 13303 ts = Timestamp("2014-12-31 23:59:00-05:00", tz="US/Eastern") check(ts.year, 2014) check(ts.month, 12) check(ts.day, 31) check(ts.hour, 23) check(ts.minute, 59) check(ts.second, 0) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 0) check(ts.nanosecond, 0) check(ts.dayofweek, 2) check(ts.quarter, 4) check(ts.dayofyear, 365) check(ts.week, 1) check(ts.daysinmonth, 31) ts = Timestamp("2014-01-01 00:00:00+01:00") starts = ["is_month_start", "is_quarter_start", "is_year_start"] for start in starts: assert getattr(ts, start) ts = Timestamp("2014-12-31 23:59:59+01:00") ends = ["is_month_end", "is_year_end", "is_quarter_end"] for end in ends: assert getattr(ts, end) # GH 12806 @pytest.mark.parametrize( "data", [Timestamp("2017-08-28 23:00:00"), Timestamp("2017-08-28 23:00:00", tz="EST")], ) @pytest.mark.parametrize( "time_locale", [None] if tm.get_locales() is None else [None] + tm.get_locales() ) def test_names(self, data, time_locale): # GH 17354 # Test .day_name(), .month_name if time_locale is None: expected_day = "Monday" expected_month = "August" else: with tm.set_locale(time_locale, locale.LC_TIME): expected_day = calendar.day_name[0].capitalize() expected_month = calendar.month_name[8].capitalize() result_day = data.day_name(time_locale) result_month = data.month_name(time_locale) # Work around https://github.com/pandas-dev/pandas/issues/22342 # different normalizations expected_day = unicodedata.normalize("NFD", expected_day) expected_month = unicodedata.normalize("NFD", expected_month) result_day = unicodedata.normalize("NFD", result_day) result_month = unicodedata.normalize("NFD", result_month) assert result_day == expected_day assert 
result_month == expected_month # Test NaT nan_ts = Timestamp(NaT) assert np.isnan(nan_ts.day_name(time_locale)) assert np.isnan(nan_ts.month_name(time_locale)) def test_is_leap_year(self, tz_naive_fixture): tz = tz_naive_fixture # GH 13727 dt = Timestamp("2000-01-01 00:00:00", tz=tz) assert dt.is_leap_year assert isinstance(dt.is_leap_year, bool) dt = Timestamp("1999-01-01 00:00:00", tz=tz) assert not dt.is_leap_year dt = Timestamp("2004-01-01 00:00:00", tz=tz) assert dt.is_leap_year dt = Timestamp("2100-01-01 00:00:00", tz=tz) assert not dt.is_leap_year def test_woy_boundary(self): # make sure weeks at year boundaries are correct d = datetime(2013, 12, 31) result = Timestamp(d).week expected = 1 # ISO standard assert result == expected d = datetime(2008, 12, 28) result = Timestamp(d).week expected = 52 # ISO standard assert result == expected d = datetime(2009, 12, 31) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 1) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 3) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected result = np.array( [ Timestamp(datetime(*args)).week for args in [(2000, 1, 1), (2000, 1, 2), (2005, 1, 1), (2005, 1, 2)] ] ) assert (result == [52, 52, 53, 53]).all() def test_resolution(self): # GH#21336, GH#21365 dt = Timestamp("2100-01-01 00:00:00") assert dt.resolution == Timedelta(nanoseconds=1) # Check that the attribute is available on the class, mirroring # the stdlib datetime behavior assert Timestamp.resolution == Timedelta(nanoseconds=1) class TestTimestamp: def test_tz(self): tstr = "2014-02-01 09:00" ts = Timestamp(tstr) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local == Timestamp(tstr, tz="Asia/Tokyo") conv = local.tz_convert("US/Eastern") assert conv == Timestamp("2014-01-31 19:00", tz="US/Eastern") assert conv.hour == 19 # preserves nanosecond ts = Timestamp(tstr) + offsets.Nano(5) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local.nanosecond == 5 conv = local.tz_convert("US/Eastern") assert conv.nanosecond == 5 assert conv.hour == 19 def test_utc_z_designator(self): assert get_timezone(Timestamp("2014-11-02 01:00Z").tzinfo) is utc def test_asm8(self): np.random.seed(7_960_929) ns = [Timestamp.min.value, Timestamp.max.value, 1000] for n in ns: assert ( Timestamp(n).asm8.view("i8") == np.datetime64(n, "ns").view("i8") == n ) assert Timestamp("nat").asm8.view("i8") == np.datetime64("nat", "ns").view("i8") def test_class_ops_pytz(self): def compare(x, y): assert int((Timestamp(x).value - Timestamp(y).value) / 1e9) == 0 compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), datetime.now(timezone("UTC"))) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_class_ops_dateutil(self): def compare(x, y): assert ( int( np.round(Timestamp(x).value / 1e9) - np.round(Timestamp(y).value / 1e9) ) == 0 ) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), 
datetime.now(tzutc())) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_basics_nanos(self): val = np.int64(946_684_800_000_000_000).view("M8[ns]") stamp = Timestamp(val.view("i8") + 500) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.microsecond == 0 assert stamp.nanosecond == 500 # GH 14415 val = np.iinfo(np.int64).min + 80_000_000_000_000 stamp = Timestamp(val) assert stamp.year == 1677 assert stamp.month == 9 assert stamp.day == 21 assert stamp.microsecond == 145224 assert stamp.nanosecond == 192 @pytest.mark.parametrize( "value, check_kwargs", [ [946688461000000000, {}], [946688461000000000 / 1000, dict(unit="us")], [946688461000000000 / 1_000_000, dict(unit="ms")], [946688461000000000 / 1_000_000_000, dict(unit="s")], [10957, dict(unit="D", h=0)], [ (946688461000000000 + 500000) / 1000000000, dict(unit="s", us=499, ns=964), ], [(946688461000000000 + 500000000) / 1000000000, dict(unit="s", us=500000)], [(946688461000000000 + 500000) / 1000000, dict(unit="ms", us=500)], [(946688461000000000 + 500000) / 1000, dict(unit="us", us=500)], [(946688461000000000 + 500000000) / 1000000, dict(unit="ms", us=500000)], [946688461000000000 / 1000.0 + 5, dict(unit="us", us=5)], [946688461000000000 / 1000.0 + 5000, dict(unit="us", us=5000)], [946688461000000000 / 1000000.0 + 0.5, dict(unit="ms", us=500)], [946688461000000000 / 1000000.0 + 0.005, dict(unit="ms", us=5, ns=5)], [946688461000000000 / 1000000000.0 + 0.5, dict(unit="s", us=500000)], [10957 + 0.5, dict(unit="D", h=12)], ], ) def test_unit(self, value, check_kwargs): def check(value, unit=None, h=1, s=1, us=0, ns=0): stamp = Timestamp(value, unit=unit) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.day == 1 assert stamp.hour == h if unit != "D": assert stamp.minute == 1 assert stamp.second == s assert stamp.microsecond == us else: assert stamp.minute == 0 assert stamp.second == 0 assert stamp.microsecond == 0 assert stamp.nanosecond == ns check(value, **check_kwargs) def test_roundtrip(self): # test value to string and back conversions # further test accessors base = Timestamp("20140101 00:00:00") result = Timestamp(base.value + Timedelta("5ms").value) assert result == Timestamp(f"{base}.005000") assert result.microsecond == 5000 result = Timestamp(base.value + Timedelta("5us").value) assert result == Timestamp(f"{base}.000005") assert result.microsecond == 5 result = Timestamp(base.value + Timedelta("5ns").value) assert result == Timestamp(f"{base}.000000005") assert result.nanosecond == 5 assert result.microsecond == 0 result = Timestamp(base.value + Timedelta("6ms 5us").value) assert result == Timestamp(f"{base}.006005") assert result.microsecond == 5 + 6 * 1000 result = Timestamp(base.value + Timedelta("200ms 5us").value) assert result == Timestamp(f"{base}.200005") assert result.microsecond == 5 + 200 * 1000 def test_hash_equivalent(self): d = {datetime(2011, 1, 1): 5} stamp = Timestamp(datetime(2011, 1, 1)) assert d[stamp] == 5 def test_tz_conversion_freq(self, tz_naive_fixture): # GH25241 t1 = 
Timestamp("2019-01-01 10:00", freq="H") assert t1.tz_localize(tz=tz_naive_fixture).freq == t1.freq t2 = Timestamp("2019-01-02 12:00", tz="UTC", freq="T") assert t2.tz_convert(tz="UTC").freq == t2.freq class TestTimestampNsOperations: def test_nanosecond_string_parsing(self): ts = Timestamp("2013-05-01 07:15:45.123456789") # GH 7878 expected_repr = "2013-05-01 07:15:45.123456789" expected_value = 1_367_392_545_123_456_789 assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789+09:00", tz="Asia/Tokyo") assert ts.value == expected_value - 9 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="UTC") assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="US/Eastern") assert ts.value == expected_value + 4 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) # GH 10041 ts = Timestamp("20130501T071545.123456789") assert ts.value == expected_value assert expected_repr in repr(ts) def test_nanosecond_timestamp(self): # GH 7610 expected = 1_293_840_000_000_000_005 t = Timestamp("2011-01-01") + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000005Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 expected = 1_293_840_000_000_000_010 t = t + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000010Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 class TestTimestampToJulianDate: def test_compare_1700(self): r = Timestamp("1700-06-23").to_julian_date() assert r == 2_342_145.5 def test_compare_2000(self): r = Timestamp("2000-04-12").to_julian_date() assert r == 2_451_646.5 def test_compare_2100(self): r = Timestamp("2100-08-12").to_julian_date() assert r == 2_488_292.5 def test_compare_hour01(self): r = Timestamp("2000-08-12T01:00:00").to_julian_date() assert r == 2_451_768.5416666666666666 def test_compare_hour13(self): r = Timestamp("2000-08-12T13:00:00").to_julian_date() assert r == 2_451_769.0416666666666666 class TestTimestampConversion: def test_conversion(self): # GH#9255 ts = Timestamp("2000-01-01") result = ts.to_pydatetime() expected = datetime(2000, 1, 1) assert result == expected assert type(result) == type(expected) result = ts.to_datetime64() expected = np.datetime64(ts.value, "ns") assert result == expected assert type(result) == type(expected) assert result.dtype == expected.dtype def test_to_pydatetime_nonzero_nano(self): ts = Timestamp("2011-01-01 9:00:00.123456789") # Warn the user of data loss (nanoseconds). 
with tm.assert_produces_warning(UserWarning, check_stacklevel=False): expected = datetime(2011, 1, 1, 9, 0, 0, 123456) result = ts.to_pydatetime() assert result == expected def test_timestamp_to_datetime(self): stamp = Timestamp("20090415", tz="US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_dateutil(self): stamp = Timestamp("20090415", tz="dateutil/US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_pytz(self): stamp = Timestamp("20090415", tz=pytz.timezone("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo @td.skip_if_windows_python_3 def test_timestamp_to_datetime_explicit_dateutil(self): stamp = Timestamp("20090415", tz=gettz("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_to_datetime_bijective(self): # Ensure that converting to datetime and back only loses precision # by going from nanoseconds to microseconds. exp_warning = None if Timestamp.max.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.max.to_pydatetime()).value / 1000 == Timestamp.max.value / 1000 ) exp_warning = None if Timestamp.min.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.min.to_pydatetime()).value / 1000 == Timestamp.min.value / 1000 ) def test_to_period_tz_warning(self): # GH#21333 make sure a warning is issued when timezone # info is lost ts = Timestamp("2009-04-15 16:17:18", tz="US/Eastern") with tm.assert_produces_warning(UserWarning): # warning that timezone info will be lost ts.to_period("D") def test_to_numpy_alias(self): # GH 24653: alias .to_numpy() for scalars ts = Timestamp(datetime.now()) assert ts.to_datetime64() == ts.to_numpy() class SubDatetime(datetime): pass @pytest.mark.parametrize( "lh,rh", [ (SubDatetime(2000, 1, 1), Timedelta(hours=1)), (Timedelta(hours=1), SubDatetime(2000, 1, 1)), ], ) def test_dt_subclass_add_timedelta(lh, rh): # GH#25851 # ensure that subclassed datetime works for # Timedelta operations result = lh + rh expected = SubDatetime(2000, 1, 1, 1) assert result == expected
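test_to_pydatetime_nonzero_nano and test_to_datetime_bijective above both hinge on the fact that converting a Timestamp to a standard-library datetime truncates nanoseconds to microseconds and warns about the loss. A brief standalone sketch of that behaviour, assuming pandas is installed; the timestamp value is illustrative:

import warnings
from datetime import datetime

import pandas as pd

ts = pd.Timestamp("2011-01-01 09:00:00.123456789")
with warnings.catch_warnings(record=True):
    warnings.simplefilter("always")
    dt = ts.to_pydatetime()      # warns: the trailing 789 ns cannot be kept

print(ts.nanosecond)                                   # 789
print(dt == datetime(2011, 1, 1, 9, 0, 0, 123456))     # True, microsecond precision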
TomAugspurger/pandas
pandas/tests/scalar/timestamp/test_timestamp.py
pandas/tests/dtypes/test_generic.py
import pandas as pd
import pandas._testing as tm


class TestUnaryOps:
    def test_invert(self):
        a = pd.array([True, False, None], dtype="boolean")
        expected = pd.array([False, True, None], dtype="boolean")
        tm.assert_extension_array_equal(~a, expected)

        expected = pd.Series(expected, index=["a", "b", "c"], name="name")
        result = ~pd.Series(a, index=["a", "b", "c"], name="name")
        tm.assert_series_equal(result, expected)

        df = pd.DataFrame({"A": a, "B": [True, False, False]}, index=["a", "b", "c"])
        result = ~df
        expected = pd.DataFrame(
            {"A": expected, "B": [False, True, True]}, index=["a", "b", "c"]
        )
        tm.assert_frame_equal(result, expected)
""" test the scalar Timestamp """ import calendar from datetime import datetime, timedelta import locale import unicodedata from dateutil.tz import tzutc import numpy as np import pytest import pytz from pytz import timezone, utc from pandas._libs.tslibs.timezones import dateutil_gettz as gettz, get_timezone from pandas.compat.numpy import np_datetime64_compat import pandas.util._test_decorators as td from pandas import NaT, Timedelta, Timestamp import pandas._testing as tm from pandas.tseries import offsets class TestTimestampProperties: def test_properties_business(self): ts = Timestamp("2017-10-01", freq="B") control = Timestamp("2017-10-01") assert ts.dayofweek == 6 assert not ts.is_month_start # not a weekday assert not ts.is_quarter_start # not a weekday # Control case: non-business is month/qtr start assert control.is_month_start assert control.is_quarter_start ts = Timestamp("2017-09-30", freq="B") control = Timestamp("2017-09-30") assert ts.dayofweek == 5 assert not ts.is_month_end # not a weekday assert not ts.is_quarter_end # not a weekday # Control case: non-business is month/qtr start assert control.is_month_end assert control.is_quarter_end def test_fields(self): def check(value, equal): # that we are int like assert isinstance(value, int) assert value == equal # GH 10050 ts = Timestamp("2015-05-10 09:06:03.000100001") check(ts.year, 2015) check(ts.month, 5) check(ts.day, 10) check(ts.hour, 9) check(ts.minute, 6) check(ts.second, 3) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 100) check(ts.nanosecond, 1) check(ts.dayofweek, 6) check(ts.quarter, 2) check(ts.dayofyear, 130) check(ts.week, 19) check(ts.daysinmonth, 31) check(ts.daysinmonth, 31) # GH 13303 ts = Timestamp("2014-12-31 23:59:00-05:00", tz="US/Eastern") check(ts.year, 2014) check(ts.month, 12) check(ts.day, 31) check(ts.hour, 23) check(ts.minute, 59) check(ts.second, 0) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 0) check(ts.nanosecond, 0) check(ts.dayofweek, 2) check(ts.quarter, 4) check(ts.dayofyear, 365) check(ts.week, 1) check(ts.daysinmonth, 31) ts = Timestamp("2014-01-01 00:00:00+01:00") starts = ["is_month_start", "is_quarter_start", "is_year_start"] for start in starts: assert getattr(ts, start) ts = Timestamp("2014-12-31 23:59:59+01:00") ends = ["is_month_end", "is_year_end", "is_quarter_end"] for end in ends: assert getattr(ts, end) # GH 12806 @pytest.mark.parametrize( "data", [Timestamp("2017-08-28 23:00:00"), Timestamp("2017-08-28 23:00:00", tz="EST")], ) @pytest.mark.parametrize( "time_locale", [None] if tm.get_locales() is None else [None] + tm.get_locales() ) def test_names(self, data, time_locale): # GH 17354 # Test .day_name(), .month_name if time_locale is None: expected_day = "Monday" expected_month = "August" else: with tm.set_locale(time_locale, locale.LC_TIME): expected_day = calendar.day_name[0].capitalize() expected_month = calendar.month_name[8].capitalize() result_day = data.day_name(time_locale) result_month = data.month_name(time_locale) # Work around https://github.com/pandas-dev/pandas/issues/22342 # different normalizations expected_day = unicodedata.normalize("NFD", expected_day) expected_month = unicodedata.normalize("NFD", expected_month) result_day = unicodedata.normalize("NFD", result_day) result_month = unicodedata.normalize("NFD", result_month) assert result_day == expected_day assert 
result_month == expected_month # Test NaT nan_ts = Timestamp(NaT) assert np.isnan(nan_ts.day_name(time_locale)) assert np.isnan(nan_ts.month_name(time_locale)) def test_is_leap_year(self, tz_naive_fixture): tz = tz_naive_fixture # GH 13727 dt = Timestamp("2000-01-01 00:00:00", tz=tz) assert dt.is_leap_year assert isinstance(dt.is_leap_year, bool) dt = Timestamp("1999-01-01 00:00:00", tz=tz) assert not dt.is_leap_year dt = Timestamp("2004-01-01 00:00:00", tz=tz) assert dt.is_leap_year dt = Timestamp("2100-01-01 00:00:00", tz=tz) assert not dt.is_leap_year def test_woy_boundary(self): # make sure weeks at year boundaries are correct d = datetime(2013, 12, 31) result = Timestamp(d).week expected = 1 # ISO standard assert result == expected d = datetime(2008, 12, 28) result = Timestamp(d).week expected = 52 # ISO standard assert result == expected d = datetime(2009, 12, 31) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 1) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 3) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected result = np.array( [ Timestamp(datetime(*args)).week for args in [(2000, 1, 1), (2000, 1, 2), (2005, 1, 1), (2005, 1, 2)] ] ) assert (result == [52, 52, 53, 53]).all() def test_resolution(self): # GH#21336, GH#21365 dt = Timestamp("2100-01-01 00:00:00") assert dt.resolution == Timedelta(nanoseconds=1) # Check that the attribute is available on the class, mirroring # the stdlib datetime behavior assert Timestamp.resolution == Timedelta(nanoseconds=1) class TestTimestamp: def test_tz(self): tstr = "2014-02-01 09:00" ts = Timestamp(tstr) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local == Timestamp(tstr, tz="Asia/Tokyo") conv = local.tz_convert("US/Eastern") assert conv == Timestamp("2014-01-31 19:00", tz="US/Eastern") assert conv.hour == 19 # preserves nanosecond ts = Timestamp(tstr) + offsets.Nano(5) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local.nanosecond == 5 conv = local.tz_convert("US/Eastern") assert conv.nanosecond == 5 assert conv.hour == 19 def test_utc_z_designator(self): assert get_timezone(Timestamp("2014-11-02 01:00Z").tzinfo) is utc def test_asm8(self): np.random.seed(7_960_929) ns = [Timestamp.min.value, Timestamp.max.value, 1000] for n in ns: assert ( Timestamp(n).asm8.view("i8") == np.datetime64(n, "ns").view("i8") == n ) assert Timestamp("nat").asm8.view("i8") == np.datetime64("nat", "ns").view("i8") def test_class_ops_pytz(self): def compare(x, y): assert int((Timestamp(x).value - Timestamp(y).value) / 1e9) == 0 compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), datetime.now(timezone("UTC"))) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_class_ops_dateutil(self): def compare(x, y): assert ( int( np.round(Timestamp(x).value / 1e9) - np.round(Timestamp(y).value / 1e9) ) == 0 ) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), 
datetime.now(tzutc())) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_basics_nanos(self): val = np.int64(946_684_800_000_000_000).view("M8[ns]") stamp = Timestamp(val.view("i8") + 500) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.microsecond == 0 assert stamp.nanosecond == 500 # GH 14415 val = np.iinfo(np.int64).min + 80_000_000_000_000 stamp = Timestamp(val) assert stamp.year == 1677 assert stamp.month == 9 assert stamp.day == 21 assert stamp.microsecond == 145224 assert stamp.nanosecond == 192 @pytest.mark.parametrize( "value, check_kwargs", [ [946688461000000000, {}], [946688461000000000 / 1000, dict(unit="us")], [946688461000000000 / 1_000_000, dict(unit="ms")], [946688461000000000 / 1_000_000_000, dict(unit="s")], [10957, dict(unit="D", h=0)], [ (946688461000000000 + 500000) / 1000000000, dict(unit="s", us=499, ns=964), ], [(946688461000000000 + 500000000) / 1000000000, dict(unit="s", us=500000)], [(946688461000000000 + 500000) / 1000000, dict(unit="ms", us=500)], [(946688461000000000 + 500000) / 1000, dict(unit="us", us=500)], [(946688461000000000 + 500000000) / 1000000, dict(unit="ms", us=500000)], [946688461000000000 / 1000.0 + 5, dict(unit="us", us=5)], [946688461000000000 / 1000.0 + 5000, dict(unit="us", us=5000)], [946688461000000000 / 1000000.0 + 0.5, dict(unit="ms", us=500)], [946688461000000000 / 1000000.0 + 0.005, dict(unit="ms", us=5, ns=5)], [946688461000000000 / 1000000000.0 + 0.5, dict(unit="s", us=500000)], [10957 + 0.5, dict(unit="D", h=12)], ], ) def test_unit(self, value, check_kwargs): def check(value, unit=None, h=1, s=1, us=0, ns=0): stamp = Timestamp(value, unit=unit) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.day == 1 assert stamp.hour == h if unit != "D": assert stamp.minute == 1 assert stamp.second == s assert stamp.microsecond == us else: assert stamp.minute == 0 assert stamp.second == 0 assert stamp.microsecond == 0 assert stamp.nanosecond == ns check(value, **check_kwargs) def test_roundtrip(self): # test value to string and back conversions # further test accessors base = Timestamp("20140101 00:00:00") result = Timestamp(base.value + Timedelta("5ms").value) assert result == Timestamp(f"{base}.005000") assert result.microsecond == 5000 result = Timestamp(base.value + Timedelta("5us").value) assert result == Timestamp(f"{base}.000005") assert result.microsecond == 5 result = Timestamp(base.value + Timedelta("5ns").value) assert result == Timestamp(f"{base}.000000005") assert result.nanosecond == 5 assert result.microsecond == 0 result = Timestamp(base.value + Timedelta("6ms 5us").value) assert result == Timestamp(f"{base}.006005") assert result.microsecond == 5 + 6 * 1000 result = Timestamp(base.value + Timedelta("200ms 5us").value) assert result == Timestamp(f"{base}.200005") assert result.microsecond == 5 + 200 * 1000 def test_hash_equivalent(self): d = {datetime(2011, 1, 1): 5} stamp = Timestamp(datetime(2011, 1, 1)) assert d[stamp] == 5 def test_tz_conversion_freq(self, tz_naive_fixture): # GH25241 t1 = 
Timestamp("2019-01-01 10:00", freq="H") assert t1.tz_localize(tz=tz_naive_fixture).freq == t1.freq t2 = Timestamp("2019-01-02 12:00", tz="UTC", freq="T") assert t2.tz_convert(tz="UTC").freq == t2.freq class TestTimestampNsOperations: def test_nanosecond_string_parsing(self): ts = Timestamp("2013-05-01 07:15:45.123456789") # GH 7878 expected_repr = "2013-05-01 07:15:45.123456789" expected_value = 1_367_392_545_123_456_789 assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789+09:00", tz="Asia/Tokyo") assert ts.value == expected_value - 9 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="UTC") assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="US/Eastern") assert ts.value == expected_value + 4 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) # GH 10041 ts = Timestamp("20130501T071545.123456789") assert ts.value == expected_value assert expected_repr in repr(ts) def test_nanosecond_timestamp(self): # GH 7610 expected = 1_293_840_000_000_000_005 t = Timestamp("2011-01-01") + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000005Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 expected = 1_293_840_000_000_000_010 t = t + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000010Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 class TestTimestampToJulianDate: def test_compare_1700(self): r = Timestamp("1700-06-23").to_julian_date() assert r == 2_342_145.5 def test_compare_2000(self): r = Timestamp("2000-04-12").to_julian_date() assert r == 2_451_646.5 def test_compare_2100(self): r = Timestamp("2100-08-12").to_julian_date() assert r == 2_488_292.5 def test_compare_hour01(self): r = Timestamp("2000-08-12T01:00:00").to_julian_date() assert r == 2_451_768.5416666666666666 def test_compare_hour13(self): r = Timestamp("2000-08-12T13:00:00").to_julian_date() assert r == 2_451_769.0416666666666666 class TestTimestampConversion: def test_conversion(self): # GH#9255 ts = Timestamp("2000-01-01") result = ts.to_pydatetime() expected = datetime(2000, 1, 1) assert result == expected assert type(result) == type(expected) result = ts.to_datetime64() expected = np.datetime64(ts.value, "ns") assert result == expected assert type(result) == type(expected) assert result.dtype == expected.dtype def test_to_pydatetime_nonzero_nano(self): ts = Timestamp("2011-01-01 9:00:00.123456789") # Warn the user of data loss (nanoseconds). 
with tm.assert_produces_warning(UserWarning, check_stacklevel=False): expected = datetime(2011, 1, 1, 9, 0, 0, 123456) result = ts.to_pydatetime() assert result == expected def test_timestamp_to_datetime(self): stamp = Timestamp("20090415", tz="US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_dateutil(self): stamp = Timestamp("20090415", tz="dateutil/US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_pytz(self): stamp = Timestamp("20090415", tz=pytz.timezone("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo @td.skip_if_windows_python_3 def test_timestamp_to_datetime_explicit_dateutil(self): stamp = Timestamp("20090415", tz=gettz("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_to_datetime_bijective(self): # Ensure that converting to datetime and back only loses precision # by going from nanoseconds to microseconds. exp_warning = None if Timestamp.max.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.max.to_pydatetime()).value / 1000 == Timestamp.max.value / 1000 ) exp_warning = None if Timestamp.min.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.min.to_pydatetime()).value / 1000 == Timestamp.min.value / 1000 ) def test_to_period_tz_warning(self): # GH#21333 make sure a warning is issued when timezone # info is lost ts = Timestamp("2009-04-15 16:17:18", tz="US/Eastern") with tm.assert_produces_warning(UserWarning): # warning that timezone info will be lost ts.to_period("D") def test_to_numpy_alias(self): # GH 24653: alias .to_numpy() for scalars ts = Timestamp(datetime.now()) assert ts.to_datetime64() == ts.to_numpy() class SubDatetime(datetime): pass @pytest.mark.parametrize( "lh,rh", [ (SubDatetime(2000, 1, 1), Timedelta(hours=1)), (Timedelta(hours=1), SubDatetime(2000, 1, 1)), ], ) def test_dt_subclass_add_timedelta(lh, rh): # GH#25851 # ensure that subclassed datetime works for # Timedelta operations result = lh + rh expected = SubDatetime(2000, 1, 1, 1) assert result == expected
TomAugspurger/pandas
pandas/tests/scalar/timestamp/test_timestamp.py
pandas/tests/arrays/boolean/test_ops.py
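For quick orientation, here is a minimal interactive sketch (demo values are my own, not part of the dataset row above) exercising a few of the Timestamp accessors that the test file above asserts against; it assumes the pandas version this repo tracks.

import pandas as pd

# Field accessors checked in TestTimestampProperties.test_fields above
ts = pd.Timestamp("2015-05-10 09:06:03.000100001")
print(ts.dayofweek, ts.quarter, ts.week)   # 6 2 19, matching the assertions
print(ts.nanosecond)                       # 1

# Timezone handling checked in TestTimestamp.test_tz above
local = pd.Timestamp("2014-02-01 09:00").tz_localize("Asia/Tokyo")
print(local.tz_convert("US/Eastern"))      # 2014-01-31 19:00 in US/Eastern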
from contextlib import contextmanager from pandas.plotting._core import _get_plot_backend def table(ax, data, rowLabels=None, colLabels=None, **kwargs): """ Helper function to convert DataFrame and Series to matplotlib.table. Parameters ---------- ax : Matplotlib axes object data : DataFrame or Series Data for table contents. **kwargs Keyword arguments to be passed to matplotlib.table.table. If `rowLabels` or `colLabels` is not specified, data index or column name will be used. Returns ------- matplotlib table object """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.table( ax=ax, data=data, rowLabels=None, colLabels=None, **kwargs ) def register(): """ Register pandas formatters and converters with matplotlib. This function modifies the global ``matplotlib.units.registry`` dictionary. pandas adds custom converters for * pd.Timestamp * pd.Period * np.datetime64 * datetime.datetime * datetime.date * datetime.time See Also -------- deregister_matplotlib_converters : Remove pandas formatters and converters. """ plot_backend = _get_plot_backend("matplotlib") plot_backend.register() def deregister(): """ Remove pandas formatters and converters. Removes the custom converters added by :func:`register`. This attempts to set the state of the registry back to the state before pandas registered its own units. Converters for pandas' own types like Timestamp and Period are removed completely. Converters for types pandas overwrites, like ``datetime.datetime``, are restored to their original value. See Also -------- register_matplotlib_converters : Register pandas formatters and converters with matplotlib. """ plot_backend = _get_plot_backend("matplotlib") plot_backend.deregister() def scatter_matrix( frame, alpha=0.5, figsize=None, ax=None, grid=False, diagonal="hist", marker=".", density_kwds=None, hist_kwds=None, range_padding=0.05, **kwargs, ): """ Draw a matrix of scatter plots. Parameters ---------- frame : DataFrame alpha : float, optional Amount of transparency applied. figsize : (float,float), optional A tuple (width, height) in inches. ax : Matplotlib axis object, optional grid : bool, optional Setting this to True will show the grid. diagonal : {'hist', 'kde'} Pick between 'kde' and 'hist' for either Kernel Density Estimation or Histogram plot in the diagonal. marker : str, optional Matplotlib marker type, default '.'. density_kwds : keywords Keyword arguments to be passed to kernel density estimate plot. hist_kwds : keywords Keyword arguments to be passed to hist function. range_padding : float, default 0.05 Relative extension of axis range in x and y with respect to (x_max - x_min) or (y_max - y_min). **kwargs Keyword arguments to be passed to scatter function. Returns ------- numpy.ndarray A matrix of scatter plots. Examples -------- .. plot:: :context: close-figs >>> df = pd.DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D']) >>> pd.plotting.scatter_matrix(df, alpha=0.2) """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.scatter_matrix( frame=frame, alpha=alpha, figsize=figsize, ax=ax, grid=grid, diagonal=diagonal, marker=marker, density_kwds=density_kwds, hist_kwds=hist_kwds, range_padding=range_padding, **kwargs, ) def radviz(frame, class_column, ax=None, color=None, colormap=None, **kwds): """ Plot a multidimensional dataset in 2D. Each Series in the DataFrame is represented as a evenly distributed slice on a circle. Each data point is rendered in the circle according to the value on each Series. 
Highly correlated `Series` in the `DataFrame` are placed closer on the unit circle. RadViz allow to project a N-dimensional data set into a 2D space where the influence of each dimension can be interpreted as a balance between the influence of all dimensions. More info available at the `original article <https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.135.889>`_ describing RadViz. Parameters ---------- frame : `DataFrame` pandas object holding the data. class_column : str Column name containing the name of the data point category. ax : :class:`matplotlib.axes.Axes`, optional A plot instance to which to add the information. color : list[str] or tuple[str], optional Assign a color to each category. Example: ['blue', 'green']. colormap : str or :class:`matplotlib.colors.Colormap`, default None Colormap to select colors from. If string, load colormap with that name from matplotlib. **kwds Options to pass to matplotlib scatter plotting method. Returns ------- class:`matplotlib.axes.Axes` See Also -------- plotting.andrews_curves : Plot clustering visualization. Examples -------- .. plot:: :context: close-figs >>> df = pd.DataFrame( ... { ... 'SepalLength': [6.5, 7.7, 5.1, 5.8, 7.6, 5.0, 5.4, 4.6, 6.7, 4.6], ... 'SepalWidth': [3.0, 3.8, 3.8, 2.7, 3.0, 2.3, 3.0, 3.2, 3.3, 3.6], ... 'PetalLength': [5.5, 6.7, 1.9, 5.1, 6.6, 3.3, 4.5, 1.4, 5.7, 1.0], ... 'PetalWidth': [1.8, 2.2, 0.4, 1.9, 2.1, 1.0, 1.5, 0.2, 2.1, 0.2], ... 'Category': [ ... 'virginica', ... 'virginica', ... 'setosa', ... 'virginica', ... 'virginica', ... 'versicolor', ... 'versicolor', ... 'setosa', ... 'virginica', ... 'setosa' ... ] ... } ... ) >>> pd.plotting.radviz(df, 'Category') """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.radviz( frame=frame, class_column=class_column, ax=ax, color=color, colormap=colormap, **kwds, ) def andrews_curves( frame, class_column, ax=None, samples=200, color=None, colormap=None, **kwargs ): """ Generate a matplotlib plot of Andrews curves, for visualising clusters of multivariate data. Andrews curves have the functional form: f(t) = x_1/sqrt(2) + x_2 sin(t) + x_3 cos(t) + x_4 sin(2t) + x_5 cos(2t) + ... Where x coefficients correspond to the values of each dimension and t is linearly spaced between -pi and +pi. Each row of frame then corresponds to a single curve. Parameters ---------- frame : DataFrame Data to be plotted, preferably normalized to (0.0, 1.0). class_column : Name of the column containing class names ax : matplotlib axes object, default None samples : Number of points to plot in each curve color : list or tuple, optional Colors to use for the different classes. colormap : str or matplotlib colormap object, default None Colormap to select colors from. If string, load colormap with that name from matplotlib. **kwargs Options to pass to matplotlib plotting method. Returns ------- class:`matplotlip.axis.Axes` Examples -------- .. plot:: :context: close-figs >>> df = pd.read_csv( ... 'https://raw.github.com/pandas-dev/' ... 'pandas/master/pandas/tests/io/data/csv/iris.csv' ... ) >>> pd.plotting.andrews_curves(df, 'Name') """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.andrews_curves( frame=frame, class_column=class_column, ax=ax, samples=samples, color=color, colormap=colormap, **kwargs, ) def bootstrap_plot(series, fig=None, size=50, samples=500, **kwds): """ Bootstrap plot on mean, median and mid-range statistics. 
The bootstrap plot is used to estimate the uncertainty of a statistic by relaying on random sampling with replacement [1]_. This function will generate bootstrapping plots for mean, median and mid-range statistics for the given number of samples of the given size. .. [1] "Bootstrapping (statistics)" in \ https://en.wikipedia.org/wiki/Bootstrapping_%28statistics%29 Parameters ---------- series : pandas.Series pandas Series from where to get the samplings for the bootstrapping. fig : matplotlib.figure.Figure, default None If given, it will use the `fig` reference for plotting instead of creating a new one with default parameters. size : int, default 50 Number of data points to consider during each sampling. It must be greater or equal than the length of the `series`. samples : int, default 500 Number of times the bootstrap procedure is performed. **kwds Options to pass to matplotlib plotting method. Returns ------- matplotlib.figure.Figure Matplotlib figure. See Also -------- DataFrame.plot : Basic plotting for DataFrame objects. Series.plot : Basic plotting for Series objects. Examples -------- This example draws a basic bootstap plot for a Series. .. plot:: :context: close-figs >>> s = pd.Series(np.random.uniform(size=100)) >>> pd.plotting.bootstrap_plot(s) """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.bootstrap_plot( series=series, fig=fig, size=size, samples=samples, **kwds ) def parallel_coordinates( frame, class_column, cols=None, ax=None, color=None, use_columns=False, xticks=None, colormap=None, axvlines=True, axvlines_kwds=None, sort_labels=False, **kwargs, ): """ Parallel coordinates plotting. Parameters ---------- frame : DataFrame class_column : str Column name containing class names. cols : list, optional A list of column names to use. ax : matplotlib.axis, optional Matplotlib axis object. color : list or tuple, optional Colors to use for the different classes. use_columns : bool, optional If true, columns will be used as xticks. xticks : list or tuple, optional A list of values to use for xticks. colormap : str or matplotlib colormap, default None Colormap to use for line colors. axvlines : bool, optional If true, vertical lines will be added at each xtick. axvlines_kwds : keywords, optional Options to be passed to axvline method for vertical lines. sort_labels : bool, default False Sort class_column labels, useful when assigning colors. **kwargs Options to pass to matplotlib plotting method. Returns ------- class:`matplotlib.axis.Axes` Examples -------- .. plot:: :context: close-figs >>> df = pd.read_csv( ... 'https://raw.github.com/pandas-dev/' ... 'pandas/master/pandas/tests/io/data/csv/iris.csv' ... ) >>> pd.plotting.parallel_coordinates( ... df, 'Name', color=('#556270', '#4ECDC4', '#C7F464') ... ) """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.parallel_coordinates( frame=frame, class_column=class_column, cols=cols, ax=ax, color=color, use_columns=use_columns, xticks=xticks, colormap=colormap, axvlines=axvlines, axvlines_kwds=axvlines_kwds, sort_labels=sort_labels, **kwargs, ) def lag_plot(series, lag=1, ax=None, **kwds): """ Lag plot for time series. Parameters ---------- series : Time series lag : lag of the scatter plot, default 1 ax : Matplotlib axis object, optional **kwds Matplotlib scatter method keyword arguments. Returns ------- class:`matplotlib.axis.Axes` Examples -------- Lag plots are most commonly used to look for patterns in time series data. Given the following time series .. 
plot:: :context: close-figs >>> np.random.seed(5) >>> x = np.cumsum(np.random.normal(loc=1, scale=5, size=50)) >>> s = pd.Series(x) >>> s.plot() A lag plot with ``lag=1`` returns .. plot:: :context: close-figs >>> pd.plotting.lag_plot(s, lag=1) """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.lag_plot(series=series, lag=lag, ax=ax, **kwds) def autocorrelation_plot(series, ax=None, **kwargs): """ Autocorrelation plot for time series. Parameters ---------- series : Time series ax : Matplotlib axis object, optional **kwargs Options to pass to matplotlib plotting method. Returns ------- class:`matplotlib.axis.Axes` Examples -------- The horizontal lines in the plot correspond to 95% and 99% confidence bands. The dashed line is 99% confidence band. .. plot:: :context: close-figs >>> spacing = np.linspace(-9 * np.pi, 9 * np.pi, num=1000) >>> s = pd.Series(0.7 * np.random.rand(1000) + 0.3 * np.sin(spacing)) >>> pd.plotting.autocorrelation_plot(s) """ plot_backend = _get_plot_backend("matplotlib") return plot_backend.autocorrelation_plot(series=series, ax=ax, **kwargs) class _Options(dict): """ Stores pandas plotting options. Allows for parameter aliasing so you can just use parameter names that are the same as the plot function parameters, but is stored in a canonical format that makes it easy to breakdown into groups later. """ # alias so the names are same as plotting method parameter names _ALIASES = {"x_compat": "xaxis.compat"} _DEFAULT_KEYS = ["xaxis.compat"] def __init__(self, deprecated=False): self._deprecated = deprecated super().__setitem__("xaxis.compat", False) def __getitem__(self, key): key = self._get_canonical_key(key) if key not in self: raise ValueError(f"{key} is not a valid pandas plotting option") return super().__getitem__(key) def __setitem__(self, key, value): key = self._get_canonical_key(key) return super().__setitem__(key, value) def __delitem__(self, key): key = self._get_canonical_key(key) if key in self._DEFAULT_KEYS: raise ValueError(f"Cannot remove default parameter {key}") return super().__delitem__(key) def __contains__(self, key) -> bool: key = self._get_canonical_key(key) return super().__contains__(key) def reset(self): """ Reset the option store to its initial state Returns ------- None """ self.__init__() def _get_canonical_key(self, key): return self._ALIASES.get(key, key) @contextmanager def use(self, key, value): """ Temporarily set a parameter value using the with statement. Aliasing allowed. """ old_value = self[key] try: self[key] = value yield self finally: self[key] = old_value plot_params = _Options()
""" test the scalar Timestamp """ import calendar from datetime import datetime, timedelta import locale import unicodedata from dateutil.tz import tzutc import numpy as np import pytest import pytz from pytz import timezone, utc from pandas._libs.tslibs.timezones import dateutil_gettz as gettz, get_timezone from pandas.compat.numpy import np_datetime64_compat import pandas.util._test_decorators as td from pandas import NaT, Timedelta, Timestamp import pandas._testing as tm from pandas.tseries import offsets class TestTimestampProperties: def test_properties_business(self): ts = Timestamp("2017-10-01", freq="B") control = Timestamp("2017-10-01") assert ts.dayofweek == 6 assert not ts.is_month_start # not a weekday assert not ts.is_quarter_start # not a weekday # Control case: non-business is month/qtr start assert control.is_month_start assert control.is_quarter_start ts = Timestamp("2017-09-30", freq="B") control = Timestamp("2017-09-30") assert ts.dayofweek == 5 assert not ts.is_month_end # not a weekday assert not ts.is_quarter_end # not a weekday # Control case: non-business is month/qtr start assert control.is_month_end assert control.is_quarter_end def test_fields(self): def check(value, equal): # that we are int like assert isinstance(value, int) assert value == equal # GH 10050 ts = Timestamp("2015-05-10 09:06:03.000100001") check(ts.year, 2015) check(ts.month, 5) check(ts.day, 10) check(ts.hour, 9) check(ts.minute, 6) check(ts.second, 3) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 100) check(ts.nanosecond, 1) check(ts.dayofweek, 6) check(ts.quarter, 2) check(ts.dayofyear, 130) check(ts.week, 19) check(ts.daysinmonth, 31) check(ts.daysinmonth, 31) # GH 13303 ts = Timestamp("2014-12-31 23:59:00-05:00", tz="US/Eastern") check(ts.year, 2014) check(ts.month, 12) check(ts.day, 31) check(ts.hour, 23) check(ts.minute, 59) check(ts.second, 0) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 0) check(ts.nanosecond, 0) check(ts.dayofweek, 2) check(ts.quarter, 4) check(ts.dayofyear, 365) check(ts.week, 1) check(ts.daysinmonth, 31) ts = Timestamp("2014-01-01 00:00:00+01:00") starts = ["is_month_start", "is_quarter_start", "is_year_start"] for start in starts: assert getattr(ts, start) ts = Timestamp("2014-12-31 23:59:59+01:00") ends = ["is_month_end", "is_year_end", "is_quarter_end"] for end in ends: assert getattr(ts, end) # GH 12806 @pytest.mark.parametrize( "data", [Timestamp("2017-08-28 23:00:00"), Timestamp("2017-08-28 23:00:00", tz="EST")], ) @pytest.mark.parametrize( "time_locale", [None] if tm.get_locales() is None else [None] + tm.get_locales() ) def test_names(self, data, time_locale): # GH 17354 # Test .day_name(), .month_name if time_locale is None: expected_day = "Monday" expected_month = "August" else: with tm.set_locale(time_locale, locale.LC_TIME): expected_day = calendar.day_name[0].capitalize() expected_month = calendar.month_name[8].capitalize() result_day = data.day_name(time_locale) result_month = data.month_name(time_locale) # Work around https://github.com/pandas-dev/pandas/issues/22342 # different normalizations expected_day = unicodedata.normalize("NFD", expected_day) expected_month = unicodedata.normalize("NFD", expected_month) result_day = unicodedata.normalize("NFD", result_day) result_month = unicodedata.normalize("NFD", result_month) assert result_day == expected_day assert 
result_month == expected_month # Test NaT nan_ts = Timestamp(NaT) assert np.isnan(nan_ts.day_name(time_locale)) assert np.isnan(nan_ts.month_name(time_locale)) def test_is_leap_year(self, tz_naive_fixture): tz = tz_naive_fixture # GH 13727 dt = Timestamp("2000-01-01 00:00:00", tz=tz) assert dt.is_leap_year assert isinstance(dt.is_leap_year, bool) dt = Timestamp("1999-01-01 00:00:00", tz=tz) assert not dt.is_leap_year dt = Timestamp("2004-01-01 00:00:00", tz=tz) assert dt.is_leap_year dt = Timestamp("2100-01-01 00:00:00", tz=tz) assert not dt.is_leap_year def test_woy_boundary(self): # make sure weeks at year boundaries are correct d = datetime(2013, 12, 31) result = Timestamp(d).week expected = 1 # ISO standard assert result == expected d = datetime(2008, 12, 28) result = Timestamp(d).week expected = 52 # ISO standard assert result == expected d = datetime(2009, 12, 31) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 1) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 3) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected result = np.array( [ Timestamp(datetime(*args)).week for args in [(2000, 1, 1), (2000, 1, 2), (2005, 1, 1), (2005, 1, 2)] ] ) assert (result == [52, 52, 53, 53]).all() def test_resolution(self): # GH#21336, GH#21365 dt = Timestamp("2100-01-01 00:00:00") assert dt.resolution == Timedelta(nanoseconds=1) # Check that the attribute is available on the class, mirroring # the stdlib datetime behavior assert Timestamp.resolution == Timedelta(nanoseconds=1) class TestTimestamp: def test_tz(self): tstr = "2014-02-01 09:00" ts = Timestamp(tstr) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local == Timestamp(tstr, tz="Asia/Tokyo") conv = local.tz_convert("US/Eastern") assert conv == Timestamp("2014-01-31 19:00", tz="US/Eastern") assert conv.hour == 19 # preserves nanosecond ts = Timestamp(tstr) + offsets.Nano(5) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local.nanosecond == 5 conv = local.tz_convert("US/Eastern") assert conv.nanosecond == 5 assert conv.hour == 19 def test_utc_z_designator(self): assert get_timezone(Timestamp("2014-11-02 01:00Z").tzinfo) is utc def test_asm8(self): np.random.seed(7_960_929) ns = [Timestamp.min.value, Timestamp.max.value, 1000] for n in ns: assert ( Timestamp(n).asm8.view("i8") == np.datetime64(n, "ns").view("i8") == n ) assert Timestamp("nat").asm8.view("i8") == np.datetime64("nat", "ns").view("i8") def test_class_ops_pytz(self): def compare(x, y): assert int((Timestamp(x).value - Timestamp(y).value) / 1e9) == 0 compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), datetime.now(timezone("UTC"))) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_class_ops_dateutil(self): def compare(x, y): assert ( int( np.round(Timestamp(x).value / 1e9) - np.round(Timestamp(y).value / 1e9) ) == 0 ) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), 
datetime.now(tzutc())) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_basics_nanos(self): val = np.int64(946_684_800_000_000_000).view("M8[ns]") stamp = Timestamp(val.view("i8") + 500) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.microsecond == 0 assert stamp.nanosecond == 500 # GH 14415 val = np.iinfo(np.int64).min + 80_000_000_000_000 stamp = Timestamp(val) assert stamp.year == 1677 assert stamp.month == 9 assert stamp.day == 21 assert stamp.microsecond == 145224 assert stamp.nanosecond == 192 @pytest.mark.parametrize( "value, check_kwargs", [ [946688461000000000, {}], [946688461000000000 / 1000, dict(unit="us")], [946688461000000000 / 1_000_000, dict(unit="ms")], [946688461000000000 / 1_000_000_000, dict(unit="s")], [10957, dict(unit="D", h=0)], [ (946688461000000000 + 500000) / 1000000000, dict(unit="s", us=499, ns=964), ], [(946688461000000000 + 500000000) / 1000000000, dict(unit="s", us=500000)], [(946688461000000000 + 500000) / 1000000, dict(unit="ms", us=500)], [(946688461000000000 + 500000) / 1000, dict(unit="us", us=500)], [(946688461000000000 + 500000000) / 1000000, dict(unit="ms", us=500000)], [946688461000000000 / 1000.0 + 5, dict(unit="us", us=5)], [946688461000000000 / 1000.0 + 5000, dict(unit="us", us=5000)], [946688461000000000 / 1000000.0 + 0.5, dict(unit="ms", us=500)], [946688461000000000 / 1000000.0 + 0.005, dict(unit="ms", us=5, ns=5)], [946688461000000000 / 1000000000.0 + 0.5, dict(unit="s", us=500000)], [10957 + 0.5, dict(unit="D", h=12)], ], ) def test_unit(self, value, check_kwargs): def check(value, unit=None, h=1, s=1, us=0, ns=0): stamp = Timestamp(value, unit=unit) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.day == 1 assert stamp.hour == h if unit != "D": assert stamp.minute == 1 assert stamp.second == s assert stamp.microsecond == us else: assert stamp.minute == 0 assert stamp.second == 0 assert stamp.microsecond == 0 assert stamp.nanosecond == ns check(value, **check_kwargs) def test_roundtrip(self): # test value to string and back conversions # further test accessors base = Timestamp("20140101 00:00:00") result = Timestamp(base.value + Timedelta("5ms").value) assert result == Timestamp(f"{base}.005000") assert result.microsecond == 5000 result = Timestamp(base.value + Timedelta("5us").value) assert result == Timestamp(f"{base}.000005") assert result.microsecond == 5 result = Timestamp(base.value + Timedelta("5ns").value) assert result == Timestamp(f"{base}.000000005") assert result.nanosecond == 5 assert result.microsecond == 0 result = Timestamp(base.value + Timedelta("6ms 5us").value) assert result == Timestamp(f"{base}.006005") assert result.microsecond == 5 + 6 * 1000 result = Timestamp(base.value + Timedelta("200ms 5us").value) assert result == Timestamp(f"{base}.200005") assert result.microsecond == 5 + 200 * 1000 def test_hash_equivalent(self): d = {datetime(2011, 1, 1): 5} stamp = Timestamp(datetime(2011, 1, 1)) assert d[stamp] == 5 def test_tz_conversion_freq(self, tz_naive_fixture): # GH25241 t1 = 
Timestamp("2019-01-01 10:00", freq="H") assert t1.tz_localize(tz=tz_naive_fixture).freq == t1.freq t2 = Timestamp("2019-01-02 12:00", tz="UTC", freq="T") assert t2.tz_convert(tz="UTC").freq == t2.freq class TestTimestampNsOperations: def test_nanosecond_string_parsing(self): ts = Timestamp("2013-05-01 07:15:45.123456789") # GH 7878 expected_repr = "2013-05-01 07:15:45.123456789" expected_value = 1_367_392_545_123_456_789 assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789+09:00", tz="Asia/Tokyo") assert ts.value == expected_value - 9 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="UTC") assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="US/Eastern") assert ts.value == expected_value + 4 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) # GH 10041 ts = Timestamp("20130501T071545.123456789") assert ts.value == expected_value assert expected_repr in repr(ts) def test_nanosecond_timestamp(self): # GH 7610 expected = 1_293_840_000_000_000_005 t = Timestamp("2011-01-01") + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000005Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 expected = 1_293_840_000_000_000_010 t = t + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000010Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 class TestTimestampToJulianDate: def test_compare_1700(self): r = Timestamp("1700-06-23").to_julian_date() assert r == 2_342_145.5 def test_compare_2000(self): r = Timestamp("2000-04-12").to_julian_date() assert r == 2_451_646.5 def test_compare_2100(self): r = Timestamp("2100-08-12").to_julian_date() assert r == 2_488_292.5 def test_compare_hour01(self): r = Timestamp("2000-08-12T01:00:00").to_julian_date() assert r == 2_451_768.5416666666666666 def test_compare_hour13(self): r = Timestamp("2000-08-12T13:00:00").to_julian_date() assert r == 2_451_769.0416666666666666 class TestTimestampConversion: def test_conversion(self): # GH#9255 ts = Timestamp("2000-01-01") result = ts.to_pydatetime() expected = datetime(2000, 1, 1) assert result == expected assert type(result) == type(expected) result = ts.to_datetime64() expected = np.datetime64(ts.value, "ns") assert result == expected assert type(result) == type(expected) assert result.dtype == expected.dtype def test_to_pydatetime_nonzero_nano(self): ts = Timestamp("2011-01-01 9:00:00.123456789") # Warn the user of data loss (nanoseconds). 
with tm.assert_produces_warning(UserWarning, check_stacklevel=False): expected = datetime(2011, 1, 1, 9, 0, 0, 123456) result = ts.to_pydatetime() assert result == expected def test_timestamp_to_datetime(self): stamp = Timestamp("20090415", tz="US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_dateutil(self): stamp = Timestamp("20090415", tz="dateutil/US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_pytz(self): stamp = Timestamp("20090415", tz=pytz.timezone("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo @td.skip_if_windows_python_3 def test_timestamp_to_datetime_explicit_dateutil(self): stamp = Timestamp("20090415", tz=gettz("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_to_datetime_bijective(self): # Ensure that converting to datetime and back only loses precision # by going from nanoseconds to microseconds. exp_warning = None if Timestamp.max.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.max.to_pydatetime()).value / 1000 == Timestamp.max.value / 1000 ) exp_warning = None if Timestamp.min.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.min.to_pydatetime()).value / 1000 == Timestamp.min.value / 1000 ) def test_to_period_tz_warning(self): # GH#21333 make sure a warning is issued when timezone # info is lost ts = Timestamp("2009-04-15 16:17:18", tz="US/Eastern") with tm.assert_produces_warning(UserWarning): # warning that timezone info will be lost ts.to_period("D") def test_to_numpy_alias(self): # GH 24653: alias .to_numpy() for scalars ts = Timestamp(datetime.now()) assert ts.to_datetime64() == ts.to_numpy() class SubDatetime(datetime): pass @pytest.mark.parametrize( "lh,rh", [ (SubDatetime(2000, 1, 1), Timedelta(hours=1)), (Timedelta(hours=1), SubDatetime(2000, 1, 1)), ], ) def test_dt_subclass_add_timedelta(lh, rh): # GH#25851 # ensure that subclassed datetime works for # Timedelta operations result = lh + rh expected = SubDatetime(2000, 1, 1, 1) assert result == expected
TomAugspurger/pandas
pandas/tests/scalar/timestamp/test_timestamp.py
pandas/plotting/_misc.py
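The short sketch below shows how the wrappers defined in pandas/plotting/_misc.py above are typically called; it simply restates the docstring examples from that file (random demo data, matplotlib required), so treat it as illustrative rather than additional API.

import numpy as np
import pandas as pd

# Matrix of pairwise scatter plots, as in the scatter_matrix docstring above
df = pd.DataFrame(np.random.randn(1000, 4), columns=["A", "B", "C", "D"])
axes = pd.plotting.scatter_matrix(df, alpha=0.2)

# Lag plot of s[t] against s[t+1], as in the lag_plot docstring above
s = pd.Series(np.cumsum(np.random.normal(loc=1, scale=5, size=50)))
ax = pd.plotting.lag_plot(s, lag=1)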
""" Helper functions to generate range-like data for DatetimeArray (and possibly TimedeltaArray/PeriodArray) """ from typing import Union import numpy as np from pandas._libs.tslibs import OutOfBoundsDatetime, Timedelta, Timestamp from pandas.tseries.offsets import DateOffset def generate_regular_range( start: Union[Timestamp, Timedelta], end: Union[Timestamp, Timedelta], periods: int, freq: DateOffset, ): """ Generate a range of dates or timestamps with the spans between dates described by the given `freq` DateOffset. Parameters ---------- start : Timedelta, Timestamp or None First point of produced date range. end : Timedelta, Timestamp or None Last point of produced date range. periods : int Number of periods in produced date range. freq : Tick Describes space between dates in produced date range. Returns ------- ndarray[np.int64] Representing nanoseconds. """ start = start.value if start is not None else None end = end.value if end is not None else None stride = freq.nanos if periods is None: b = start # cannot just use e = Timestamp(end) + 1 because arange breaks when # stride is too large, see GH10887 e = b + (end - b) // stride * stride + stride // 2 + 1 elif start is not None: b = start e = _generate_range_overflow_safe(b, periods, stride, side="start") elif end is not None: e = end + stride b = _generate_range_overflow_safe(e, periods, stride, side="end") else: raise ValueError( "at least 'start' or 'end' should be specified if a 'period' is given." ) with np.errstate(over="raise"): # If the range is sufficiently large, np.arange may overflow # and incorrectly return an empty array if not caught. try: values = np.arange(b, e, stride, dtype=np.int64) except FloatingPointError: xdr = [b] while xdr[-1] != e: xdr.append(xdr[-1] + stride) values = np.array(xdr[:-1], dtype=np.int64) return values def _generate_range_overflow_safe( endpoint: int, periods: int, stride: int, side: str = "start" ) -> int: """ Calculate the second endpoint for passing to np.arange, checking to avoid an integer overflow. Catch OverflowError and re-raise as OutOfBoundsDatetime. Parameters ---------- endpoint : int nanosecond timestamp of the known endpoint of the desired range periods : int number of periods in the desired range stride : int nanoseconds between periods in the desired range side : {'start', 'end'} which end of the range `endpoint` refers to Returns ------- other_end : int Raises ------ OutOfBoundsDatetime """ # GH#14187 raise instead of incorrectly wrapping around assert side in ["start", "end"] i64max = np.uint64(np.iinfo(np.int64).max) msg = f"Cannot generate range with {side}={endpoint} and periods={periods}" with np.errstate(over="raise"): # if periods * strides cannot be multiplied within the *uint64* bounds, # we cannot salvage the operation by recursing, so raise try: addend = np.uint64(periods) * np.uint64(np.abs(stride)) except FloatingPointError as err: raise OutOfBoundsDatetime(msg) from err if np.abs(addend) <= i64max: # relatively easy case without casting concerns return _generate_range_overflow_safe_signed(endpoint, periods, stride, side) elif (endpoint > 0 and side == "start" and stride > 0) or ( endpoint < 0 and side == "end" and stride > 0 ): # no chance of not-overflowing raise OutOfBoundsDatetime(msg) elif side == "end" and endpoint > i64max and endpoint - stride <= i64max: # in _generate_regular_range we added `stride` thereby overflowing # the bounds. Adjust to fix this. 
return _generate_range_overflow_safe( endpoint - stride, periods - 1, stride, side ) # split into smaller pieces mid_periods = periods // 2 remaining = periods - mid_periods assert 0 < remaining < periods, (remaining, periods, endpoint, stride) midpoint = _generate_range_overflow_safe(endpoint, mid_periods, stride, side) return _generate_range_overflow_safe(midpoint, remaining, stride, side) def _generate_range_overflow_safe_signed( endpoint: int, periods: int, stride: int, side: str ) -> int: """ A special case for _generate_range_overflow_safe where `periods * stride` can be calculated without overflowing int64 bounds. """ assert side in ["start", "end"] if side == "end": stride *= -1 with np.errstate(over="raise"): addend = np.int64(periods) * np.int64(stride) try: # easy case with no overflows return np.int64(endpoint) + addend except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk # FloatingPointError; with reversed signed we risk OverflowError pass # if stride and endpoint had opposite signs, then endpoint + addend # should never overflow. so they must have the same signs assert (stride > 0 and endpoint >= 0) or (stride < 0 and endpoint <= 0) if stride > 0: # watch out for very special case in which we just slightly # exceed implementation bounds, but when passing the result to # np.arange will get a result slightly within the bounds result = np.uint64(endpoint) + np.uint64(addend) i64max = np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): return result raise OutOfBoundsDatetime( f"Cannot generate range with {side}={endpoint} and periods={periods}" )
""" test the scalar Timestamp """ import calendar from datetime import datetime, timedelta import locale import unicodedata from dateutil.tz import tzutc import numpy as np import pytest import pytz from pytz import timezone, utc from pandas._libs.tslibs.timezones import dateutil_gettz as gettz, get_timezone from pandas.compat.numpy import np_datetime64_compat import pandas.util._test_decorators as td from pandas import NaT, Timedelta, Timestamp import pandas._testing as tm from pandas.tseries import offsets class TestTimestampProperties: def test_properties_business(self): ts = Timestamp("2017-10-01", freq="B") control = Timestamp("2017-10-01") assert ts.dayofweek == 6 assert not ts.is_month_start # not a weekday assert not ts.is_quarter_start # not a weekday # Control case: non-business is month/qtr start assert control.is_month_start assert control.is_quarter_start ts = Timestamp("2017-09-30", freq="B") control = Timestamp("2017-09-30") assert ts.dayofweek == 5 assert not ts.is_month_end # not a weekday assert not ts.is_quarter_end # not a weekday # Control case: non-business is month/qtr start assert control.is_month_end assert control.is_quarter_end def test_fields(self): def check(value, equal): # that we are int like assert isinstance(value, int) assert value == equal # GH 10050 ts = Timestamp("2015-05-10 09:06:03.000100001") check(ts.year, 2015) check(ts.month, 5) check(ts.day, 10) check(ts.hour, 9) check(ts.minute, 6) check(ts.second, 3) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 100) check(ts.nanosecond, 1) check(ts.dayofweek, 6) check(ts.quarter, 2) check(ts.dayofyear, 130) check(ts.week, 19) check(ts.daysinmonth, 31) check(ts.daysinmonth, 31) # GH 13303 ts = Timestamp("2014-12-31 23:59:00-05:00", tz="US/Eastern") check(ts.year, 2014) check(ts.month, 12) check(ts.day, 31) check(ts.hour, 23) check(ts.minute, 59) check(ts.second, 0) msg = "'Timestamp' object has no attribute 'millisecond'" with pytest.raises(AttributeError, match=msg): ts.millisecond check(ts.microsecond, 0) check(ts.nanosecond, 0) check(ts.dayofweek, 2) check(ts.quarter, 4) check(ts.dayofyear, 365) check(ts.week, 1) check(ts.daysinmonth, 31) ts = Timestamp("2014-01-01 00:00:00+01:00") starts = ["is_month_start", "is_quarter_start", "is_year_start"] for start in starts: assert getattr(ts, start) ts = Timestamp("2014-12-31 23:59:59+01:00") ends = ["is_month_end", "is_year_end", "is_quarter_end"] for end in ends: assert getattr(ts, end) # GH 12806 @pytest.mark.parametrize( "data", [Timestamp("2017-08-28 23:00:00"), Timestamp("2017-08-28 23:00:00", tz="EST")], ) @pytest.mark.parametrize( "time_locale", [None] if tm.get_locales() is None else [None] + tm.get_locales() ) def test_names(self, data, time_locale): # GH 17354 # Test .day_name(), .month_name if time_locale is None: expected_day = "Monday" expected_month = "August" else: with tm.set_locale(time_locale, locale.LC_TIME): expected_day = calendar.day_name[0].capitalize() expected_month = calendar.month_name[8].capitalize() result_day = data.day_name(time_locale) result_month = data.month_name(time_locale) # Work around https://github.com/pandas-dev/pandas/issues/22342 # different normalizations expected_day = unicodedata.normalize("NFD", expected_day) expected_month = unicodedata.normalize("NFD", expected_month) result_day = unicodedata.normalize("NFD", result_day) result_month = unicodedata.normalize("NFD", result_month) assert result_day == expected_day assert 
result_month == expected_month # Test NaT nan_ts = Timestamp(NaT) assert np.isnan(nan_ts.day_name(time_locale)) assert np.isnan(nan_ts.month_name(time_locale)) def test_is_leap_year(self, tz_naive_fixture): tz = tz_naive_fixture # GH 13727 dt = Timestamp("2000-01-01 00:00:00", tz=tz) assert dt.is_leap_year assert isinstance(dt.is_leap_year, bool) dt = Timestamp("1999-01-01 00:00:00", tz=tz) assert not dt.is_leap_year dt = Timestamp("2004-01-01 00:00:00", tz=tz) assert dt.is_leap_year dt = Timestamp("2100-01-01 00:00:00", tz=tz) assert not dt.is_leap_year def test_woy_boundary(self): # make sure weeks at year boundaries are correct d = datetime(2013, 12, 31) result = Timestamp(d).week expected = 1 # ISO standard assert result == expected d = datetime(2008, 12, 28) result = Timestamp(d).week expected = 52 # ISO standard assert result == expected d = datetime(2009, 12, 31) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 1) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected d = datetime(2010, 1, 3) result = Timestamp(d).week expected = 53 # ISO standard assert result == expected result = np.array( [ Timestamp(datetime(*args)).week for args in [(2000, 1, 1), (2000, 1, 2), (2005, 1, 1), (2005, 1, 2)] ] ) assert (result == [52, 52, 53, 53]).all() def test_resolution(self): # GH#21336, GH#21365 dt = Timestamp("2100-01-01 00:00:00") assert dt.resolution == Timedelta(nanoseconds=1) # Check that the attribute is available on the class, mirroring # the stdlib datetime behavior assert Timestamp.resolution == Timedelta(nanoseconds=1) class TestTimestamp: def test_tz(self): tstr = "2014-02-01 09:00" ts = Timestamp(tstr) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local == Timestamp(tstr, tz="Asia/Tokyo") conv = local.tz_convert("US/Eastern") assert conv == Timestamp("2014-01-31 19:00", tz="US/Eastern") assert conv.hour == 19 # preserves nanosecond ts = Timestamp(tstr) + offsets.Nano(5) local = ts.tz_localize("Asia/Tokyo") assert local.hour == 9 assert local.nanosecond == 5 conv = local.tz_convert("US/Eastern") assert conv.nanosecond == 5 assert conv.hour == 19 def test_utc_z_designator(self): assert get_timezone(Timestamp("2014-11-02 01:00Z").tzinfo) is utc def test_asm8(self): np.random.seed(7_960_929) ns = [Timestamp.min.value, Timestamp.max.value, 1000] for n in ns: assert ( Timestamp(n).asm8.view("i8") == np.datetime64(n, "ns").view("i8") == n ) assert Timestamp("nat").asm8.view("i8") == np.datetime64("nat", "ns").view("i8") def test_class_ops_pytz(self): def compare(x, y): assert int((Timestamp(x).value - Timestamp(y).value) / 1e9) == 0 compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), datetime.now(timezone("UTC"))) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_class_ops_dateutil(self): def compare(x, y): assert ( int( np.round(Timestamp(x).value / 1e9) - np.round(Timestamp(y).value / 1e9) ) == 0 ) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now("UTC"), 
datetime.now(tzutc())) compare(Timestamp.utcnow(), datetime.utcnow()) compare(Timestamp.today(), datetime.today()) current_time = calendar.timegm(datetime.now().utctimetuple()) compare( Timestamp.utcfromtimestamp(current_time), datetime.utcfromtimestamp(current_time), ) compare( Timestamp.fromtimestamp(current_time), datetime.fromtimestamp(current_time) ) date_component = datetime.utcnow() time_component = (date_component + timedelta(minutes=10)).time() compare( Timestamp.combine(date_component, time_component), datetime.combine(date_component, time_component), ) def test_basics_nanos(self): val = np.int64(946_684_800_000_000_000).view("M8[ns]") stamp = Timestamp(val.view("i8") + 500) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.microsecond == 0 assert stamp.nanosecond == 500 # GH 14415 val = np.iinfo(np.int64).min + 80_000_000_000_000 stamp = Timestamp(val) assert stamp.year == 1677 assert stamp.month == 9 assert stamp.day == 21 assert stamp.microsecond == 145224 assert stamp.nanosecond == 192 @pytest.mark.parametrize( "value, check_kwargs", [ [946688461000000000, {}], [946688461000000000 / 1000, dict(unit="us")], [946688461000000000 / 1_000_000, dict(unit="ms")], [946688461000000000 / 1_000_000_000, dict(unit="s")], [10957, dict(unit="D", h=0)], [ (946688461000000000 + 500000) / 1000000000, dict(unit="s", us=499, ns=964), ], [(946688461000000000 + 500000000) / 1000000000, dict(unit="s", us=500000)], [(946688461000000000 + 500000) / 1000000, dict(unit="ms", us=500)], [(946688461000000000 + 500000) / 1000, dict(unit="us", us=500)], [(946688461000000000 + 500000000) / 1000000, dict(unit="ms", us=500000)], [946688461000000000 / 1000.0 + 5, dict(unit="us", us=5)], [946688461000000000 / 1000.0 + 5000, dict(unit="us", us=5000)], [946688461000000000 / 1000000.0 + 0.5, dict(unit="ms", us=500)], [946688461000000000 / 1000000.0 + 0.005, dict(unit="ms", us=5, ns=5)], [946688461000000000 / 1000000000.0 + 0.5, dict(unit="s", us=500000)], [10957 + 0.5, dict(unit="D", h=12)], ], ) def test_unit(self, value, check_kwargs): def check(value, unit=None, h=1, s=1, us=0, ns=0): stamp = Timestamp(value, unit=unit) assert stamp.year == 2000 assert stamp.month == 1 assert stamp.day == 1 assert stamp.hour == h if unit != "D": assert stamp.minute == 1 assert stamp.second == s assert stamp.microsecond == us else: assert stamp.minute == 0 assert stamp.second == 0 assert stamp.microsecond == 0 assert stamp.nanosecond == ns check(value, **check_kwargs) def test_roundtrip(self): # test value to string and back conversions # further test accessors base = Timestamp("20140101 00:00:00") result = Timestamp(base.value + Timedelta("5ms").value) assert result == Timestamp(f"{base}.005000") assert result.microsecond == 5000 result = Timestamp(base.value + Timedelta("5us").value) assert result == Timestamp(f"{base}.000005") assert result.microsecond == 5 result = Timestamp(base.value + Timedelta("5ns").value) assert result == Timestamp(f"{base}.000000005") assert result.nanosecond == 5 assert result.microsecond == 0 result = Timestamp(base.value + Timedelta("6ms 5us").value) assert result == Timestamp(f"{base}.006005") assert result.microsecond == 5 + 6 * 1000 result = Timestamp(base.value + Timedelta("200ms 5us").value) assert result == Timestamp(f"{base}.200005") assert result.microsecond == 5 + 200 * 1000 def test_hash_equivalent(self): d = {datetime(2011, 1, 1): 5} stamp = Timestamp(datetime(2011, 1, 1)) assert d[stamp] == 5 def test_tz_conversion_freq(self, tz_naive_fixture): # GH25241 t1 = 
Timestamp("2019-01-01 10:00", freq="H") assert t1.tz_localize(tz=tz_naive_fixture).freq == t1.freq t2 = Timestamp("2019-01-02 12:00", tz="UTC", freq="T") assert t2.tz_convert(tz="UTC").freq == t2.freq class TestTimestampNsOperations: def test_nanosecond_string_parsing(self): ts = Timestamp("2013-05-01 07:15:45.123456789") # GH 7878 expected_repr = "2013-05-01 07:15:45.123456789" expected_value = 1_367_392_545_123_456_789 assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789+09:00", tz="Asia/Tokyo") assert ts.value == expected_value - 9 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="UTC") assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp("2013-05-01 07:15:45.123456789", tz="US/Eastern") assert ts.value == expected_value + 4 * 3600 * 1_000_000_000 assert expected_repr in repr(ts) # GH 10041 ts = Timestamp("20130501T071545.123456789") assert ts.value == expected_value assert expected_repr in repr(ts) def test_nanosecond_timestamp(self): # GH 7610 expected = 1_293_840_000_000_000_005 t = Timestamp("2011-01-01") + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000005Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" assert t.value == expected assert t.nanosecond == 5 expected = 1_293_840_000_000_000_010 t = t + offsets.Nano(5) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(t) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 t = Timestamp(np_datetime64_compat("2011-01-01 00:00:00.000000010Z")) assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" assert t.value == expected assert t.nanosecond == 10 class TestTimestampToJulianDate: def test_compare_1700(self): r = Timestamp("1700-06-23").to_julian_date() assert r == 2_342_145.5 def test_compare_2000(self): r = Timestamp("2000-04-12").to_julian_date() assert r == 2_451_646.5 def test_compare_2100(self): r = Timestamp("2100-08-12").to_julian_date() assert r == 2_488_292.5 def test_compare_hour01(self): r = Timestamp("2000-08-12T01:00:00").to_julian_date() assert r == 2_451_768.5416666666666666 def test_compare_hour13(self): r = Timestamp("2000-08-12T13:00:00").to_julian_date() assert r == 2_451_769.0416666666666666 class TestTimestampConversion: def test_conversion(self): # GH#9255 ts = Timestamp("2000-01-01") result = ts.to_pydatetime() expected = datetime(2000, 1, 1) assert result == expected assert type(result) == type(expected) result = ts.to_datetime64() expected = np.datetime64(ts.value, "ns") assert result == expected assert type(result) == type(expected) assert result.dtype == expected.dtype def test_to_pydatetime_nonzero_nano(self): ts = Timestamp("2011-01-01 9:00:00.123456789") # Warn the user of data loss (nanoseconds). 
with tm.assert_produces_warning(UserWarning, check_stacklevel=False): expected = datetime(2011, 1, 1, 9, 0, 0, 123456) result = ts.to_pydatetime() assert result == expected def test_timestamp_to_datetime(self): stamp = Timestamp("20090415", tz="US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_dateutil(self): stamp = Timestamp("20090415", tz="dateutil/US/Eastern", freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_pytz(self): stamp = Timestamp("20090415", tz=pytz.timezone("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo @td.skip_if_windows_python_3 def test_timestamp_to_datetime_explicit_dateutil(self): stamp = Timestamp("20090415", tz=gettz("US/Eastern"), freq="D") dtval = stamp.to_pydatetime() assert stamp == dtval assert stamp.tzinfo == dtval.tzinfo def test_to_datetime_bijective(self): # Ensure that converting to datetime and back only loses precision # by going from nanoseconds to microseconds. exp_warning = None if Timestamp.max.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.max.to_pydatetime()).value / 1000 == Timestamp.max.value / 1000 ) exp_warning = None if Timestamp.min.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): assert ( Timestamp(Timestamp.min.to_pydatetime()).value / 1000 == Timestamp.min.value / 1000 ) def test_to_period_tz_warning(self): # GH#21333 make sure a warning is issued when timezone # info is lost ts = Timestamp("2009-04-15 16:17:18", tz="US/Eastern") with tm.assert_produces_warning(UserWarning): # warning that timezone info will be lost ts.to_period("D") def test_to_numpy_alias(self): # GH 24653: alias .to_numpy() for scalars ts = Timestamp(datetime.now()) assert ts.to_datetime64() == ts.to_numpy() class SubDatetime(datetime): pass @pytest.mark.parametrize( "lh,rh", [ (SubDatetime(2000, 1, 1), Timedelta(hours=1)), (Timedelta(hours=1), SubDatetime(2000, 1, 1)), ], ) def test_dt_subclass_add_timedelta(lh, rh): # GH#25851 # ensure that subclassed datetime works for # Timedelta operations result = lh + rh expected = SubDatetime(2000, 1, 1, 1) assert result == expected
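# A minimal illustrative sketch (separate from the test file above) of the Timestamp behaviours
# it exercises: unit-aware construction, nanosecond arithmetic, and the precision loss when
# converting back to a stdlib datetime. Only public pandas APIs already shown in the tests are
# used; the literal values are illustrative.
import pandas as pd

# The same instant expressed in seconds and in milliseconds compares equal
assert pd.Timestamp(946688461, unit="s") == pd.Timestamp(946688461000, unit="ms")

# Nanosecond precision survives Timestamp arithmetic
ts = pd.Timestamp("2011-01-01") + pd.offsets.Nano(5)
assert ts.nanosecond == 5

# Converting to datetime.datetime drops the nanoseconds (pandas emits a UserWarning about it)
assert ts.to_pydatetime().microsecond == 0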
TomAugspurger/pandas
pandas/tests/scalar/timestamp/test_timestamp.py
pandas/core/arrays/_ranges.py
from pandas.compat import StringIO from pandas import read_sas import pandas.util.testing as tm class TestSas(object): def test_sas_buffer_format(self): # see gh-14947 b = StringIO("") msg = ("If this is a buffer object rather than a string " "name, you must specify a format string") with tm.assert_raises_regex(ValueError, msg): read_sas(b)
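# A small sketch of the behaviour test_sas_buffer_format pins down: read_sas cannot infer the
# SAS flavour from a buffer, so a format string is required. This sketch uses the stdlib io
# module rather than the legacy pandas.compat.StringIO; the exact error wording may vary
# between pandas versions.
from io import BytesIO

import pandas as pd

try:
    pd.read_sas(BytesIO(b""))
except ValueError as err:
    print(err)  # asks for an explicit format when given a buffer instead of a file name

# With an explicit format the buffer is accepted (its bytes must then be a valid SAS file);
# `valid_xport_bytes` below is hypothetical and not defined here.
# pd.read_sas(BytesIO(valid_xport_bytes), format="xport")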
# -*- coding: utf-8 -*- import pytest import dateutil import pytz # noqa # a test below uses pytz but only inside a `eval` call import pprint from distutils.version import LooseVersion from pandas import Timestamp class TestTimestampRendering(object): # dateutil zone change (only matters for repr) if LooseVersion(dateutil.__version__) >= LooseVersion('2.6.0'): timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific'] else: timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/America/Los_Angeles'] @pytest.mark.parametrize('tz', timezones) @pytest.mark.parametrize('freq', ['D', 'M', 'S', 'N']) @pytest.mark.parametrize('date', ['2014-03-07', '2014-01-01 09:00', '2014-01-01 00:00:00.000000001']) def test_repr(self, date, freq, tz): # avoid to match with timezone name freq_repr = "'{0}'".format(freq) if tz.startswith('dateutil'): tz_repr = tz.replace('dateutil', '') else: tz_repr = tz date_only = Timestamp(date) assert date in repr(date_only) assert tz_repr not in repr(date_only) assert freq_repr not in repr(date_only) assert date_only == eval(repr(date_only)) date_tz = Timestamp(date, tz=tz) assert date in repr(date_tz) assert tz_repr in repr(date_tz) assert freq_repr not in repr(date_tz) assert date_tz == eval(repr(date_tz)) date_freq = Timestamp(date, freq=freq) assert date in repr(date_freq) assert tz_repr not in repr(date_freq) assert freq_repr in repr(date_freq) assert date_freq == eval(repr(date_freq)) date_tz_freq = Timestamp(date, tz=tz, freq=freq) assert date in repr(date_tz_freq) assert tz_repr in repr(date_tz_freq) assert freq_repr in repr(date_tz_freq) assert date_tz_freq == eval(repr(date_tz_freq)) def test_repr_utcoffset(self): # This can cause the tz field to be populated, but it's redundant to # include this information in the date-string. date_with_utc_offset = Timestamp('2014-03-13 00:00:00-0400', tz=None) assert '2014-03-13 00:00:00-0400' in repr(date_with_utc_offset) assert 'tzoffset' not in repr(date_with_utc_offset) assert 'pytz.FixedOffset(-240)' in repr(date_with_utc_offset) expr = repr(date_with_utc_offset).replace("'pytz.FixedOffset(-240)'", 'pytz.FixedOffset(-240)') assert date_with_utc_offset == eval(expr) def test_timestamp_repr_pre1900(self): # pre-1900 stamp = Timestamp('1850-01-01', tz='US/Eastern') repr(stamp) iso8601 = '1850-01-01 01:23:45.012345' stamp = Timestamp(iso8601, tz='US/Eastern') result = repr(stamp) assert iso8601 in result def test_pprint(self): # GH#12622 nested_obj = {'foo': 1, 'bar': [{'w': {'a': Timestamp('2011-01-01')}}] * 10} result = pprint.pformat(nested_obj, width=50) expected = r"""{'bar': [{'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}], 'foo': 1}""" assert result == expected
amolkahat/pandas
pandas/tests/scalar/timestamp/test_rendering.py
pandas/tests/io/sas/test_sas.py
#!/usr/bin/env python """Top level ``eval`` module. """ import warnings import tokenize from pandas.io.formats.printing import pprint_thing from pandas.core.computation.scope import _ensure_scope from pandas.compat import string_types from pandas.core.computation.engines import _engines from pandas.util._validators import validate_bool_kwarg def _check_engine(engine): """Make sure a valid engine is passed. Parameters ---------- engine : str Raises ------ KeyError * If an invalid engine is passed ImportError * If numexpr was requested but doesn't exist Returns ------- string engine """ from pandas.core.computation.check import _NUMEXPR_INSTALLED if engine is None: if _NUMEXPR_INSTALLED: engine = 'numexpr' else: engine = 'python' if engine not in _engines: valid = list(_engines.keys()) raise KeyError('Invalid engine {engine!r} passed, valid engines are' ' {valid}'.format(engine=engine, valid=valid)) # TODO: validate this in a more general way (thinking of future engines # that won't necessarily be import-able) # Could potentially be done on engine instantiation if engine == 'numexpr': if not _NUMEXPR_INSTALLED: raise ImportError("'numexpr' is not installed or an " "unsupported version. Cannot use " "engine='numexpr' for query/eval " "if 'numexpr' is not installed") return engine def _check_parser(parser): """Make sure a valid parser is passed. Parameters ---------- parser : str Raises ------ KeyError * If an invalid parser is passed """ from pandas.core.computation.expr import _parsers if parser not in _parsers: raise KeyError('Invalid parser {parser!r} passed, valid parsers are' ' {valid}'.format(parser=parser, valid=_parsers.keys())) def _check_resolvers(resolvers): if resolvers is not None: for resolver in resolvers: if not hasattr(resolver, '__getitem__'): name = type(resolver).__name__ raise TypeError('Resolver of type {name!r} does not implement ' 'the __getitem__ method'.format(name=name)) def _check_expression(expr): """Make sure an expression is not an empty string Parameters ---------- expr : object An object that can be converted to a string Raises ------ ValueError * If expr is an empty string """ if not expr: raise ValueError("expr cannot be an empty string") def _convert_expression(expr): """Convert an object to an expression. Thus function converts an object to an expression (a unicode string) and checks to make sure it isn't empty after conversion. This is used to convert operators to their string representation for recursive calls to :func:`~pandas.eval`. Parameters ---------- expr : object The object to be converted to a string. Returns ------- s : unicode The string representation of an object. Raises ------ ValueError * If the expression is empty. 
""" s = pprint_thing(expr) _check_expression(s) return s def _check_for_locals(expr, stack_level, parser): from pandas.core.computation.expr import tokenize_string at_top_of_stack = stack_level == 0 not_pandas_parser = parser != 'pandas' if not_pandas_parser: msg = "The '@' prefix is only supported by the pandas parser" elif at_top_of_stack: msg = ("The '@' prefix is not allowed in " "top-level eval calls, \nplease refer to " "your variables by name without the '@' " "prefix") if at_top_of_stack or not_pandas_parser: for toknum, tokval in tokenize_string(expr): if toknum == tokenize.OP and tokval == '@': raise SyntaxError(msg) def eval(expr, parser='pandas', engine=None, truediv=True, local_dict=None, global_dict=None, resolvers=(), level=0, target=None, inplace=False): """Evaluate a Python expression as a string using various backends. The following arithmetic operations are supported: ``+``, ``-``, ``*``, ``/``, ``**``, ``%``, ``//`` (python engine only) along with the following boolean operations: ``|`` (or), ``&`` (and), and ``~`` (not). Additionally, the ``'pandas'`` parser allows the use of :keyword:`and`, :keyword:`or`, and :keyword:`not` with the same semantics as the corresponding bitwise operators. :class:`~pandas.Series` and :class:`~pandas.DataFrame` objects are supported and behave as they would with plain ol' Python evaluation. Parameters ---------- expr : str or unicode The expression to evaluate. This string cannot contain any Python `statements <https://docs.python.org/3/reference/simple_stmts.html#simple-statements>`__, only Python `expressions <https://docs.python.org/3/reference/simple_stmts.html#expression-statements>`__. parser : string, default 'pandas', {'pandas', 'python'} The parser to use to construct the syntax tree from the expression. The default of ``'pandas'`` parses code slightly different than standard Python. Alternatively, you can parse an expression using the ``'python'`` parser to retain strict Python semantics. See the :ref:`enhancing performance <enhancingperf.eval>` documentation for more details. engine : string or None, default 'numexpr', {'python', 'numexpr'} The engine used to evaluate the expression. Supported engines are - None : tries to use ``numexpr``, falls back to ``python`` - ``'numexpr'``: This default engine evaluates pandas objects using numexpr for large speed ups in complex expressions with large frames. - ``'python'``: Performs operations as if you had ``eval``'d in top level python. This engine is generally not that useful. More backends may be available in the future. truediv : bool, optional Whether to use true division, like in Python >= 3 local_dict : dict or None, optional A dictionary of local variables, taken from locals() by default. global_dict : dict or None, optional A dictionary of global variables, taken from globals() by default. resolvers : list of dict-like or None, optional A list of objects implementing the ``__getitem__`` special method that you can use to inject an additional collection of namespaces to use for variable lookup. For example, this is used in the :meth:`~pandas.DataFrame.query` method to inject the ``DataFrame.index`` and ``DataFrame.columns`` variables that refer to their respective :class:`~pandas.DataFrame` instance attributes. level : int, optional The number of prior stack frames to traverse and add to the current scope. Most users will **not** need to change this parameter. target : object, optional, default None This is the target object for assignment. 
It is used when there is variable assignment in the expression. If so, then `target` must support item assignment with string keys, and if a copy is being returned, it must also support `.copy()`. inplace : bool, default False If `target` is provided, and the expression mutates `target`, whether to modify `target` inplace. Otherwise, return a copy of `target` with the mutation. Returns ------- ndarray, numeric scalar, DataFrame, Series Raises ------ ValueError There are many instances where such an error can be raised: - `target=None`, but the expression is multiline. - The expression is multiline, but not all them have item assignment. An example of such an arrangement is this: a = b + 1 a + 2 Here, there are expressions on different lines, making it multiline, but the last line has no variable assigned to the output of `a + 2`. - `inplace=True`, but the expression is missing item assignment. - Item assignment is provided, but the `target` does not support string item assignment. - Item assignment is provided and `inplace=False`, but the `target` does not support the `.copy()` method Notes ----- The ``dtype`` of any objects involved in an arithmetic ``%`` operation are recursively cast to ``float64``. See the :ref:`enhancing performance <enhancingperf.eval>` documentation for more details. See Also -------- pandas.DataFrame.query pandas.DataFrame.eval """ from pandas.core.computation.expr import Expr inplace = validate_bool_kwarg(inplace, "inplace") if isinstance(expr, string_types): _check_expression(expr) exprs = [e.strip() for e in expr.splitlines() if e.strip() != ''] else: exprs = [expr] multi_line = len(exprs) > 1 if multi_line and target is None: raise ValueError("multi-line expressions are only valid in the " "context of data, use DataFrame.eval") ret = None first_expr = True target_modified = False for expr in exprs: expr = _convert_expression(expr) engine = _check_engine(engine) _check_parser(parser) _check_resolvers(resolvers) _check_for_locals(expr, level, parser) # get our (possibly passed-in) scope env = _ensure_scope(level + 1, global_dict=global_dict, local_dict=local_dict, resolvers=resolvers, target=target) parsed_expr = Expr(expr, engine=engine, parser=parser, env=env, truediv=truediv) # construct the engine and evaluate the parsed expression eng = _engines[engine] eng_inst = eng(parsed_expr) ret = eng_inst.evaluate() if parsed_expr.assigner is None: if multi_line: raise ValueError("Multi-line expressions are only valid" " if all expressions contain an assignment") elif inplace: raise ValueError("Cannot operate inplace " "if there is no assignment") # assign if needed assigner = parsed_expr.assigner if env.target is not None and assigner is not None: target_modified = True # if returning a copy, copy only on the first assignment if not inplace and first_expr: try: target = env.target.copy() except AttributeError: raise ValueError("Cannot return a copy of the target") else: target = env.target # TypeError is most commonly raised (e.g. int, list), but you # get IndexError if you try to do this assignment on np.ndarray. # we will ignore numpy warnings here; e.g. if trying # to use a non-numeric indexer try: with warnings.catch_warnings(record=True): # TODO: Filter the warnings we actually care about here. 
target[assigner] = ret except (TypeError, IndexError): raise ValueError("Cannot assign expression output to target") if not resolvers: resolvers = ({assigner: ret},) else: # existing resolver needs updated to handle # case of mutating existing column in copy for resolver in resolvers: if assigner in resolver: resolver[assigner] = ret break else: resolvers += ({assigner: ret},) ret = None first_expr = False # We want to exclude `inplace=None` as being False. if inplace is False: return target if target_modified else ret
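# A short usage sketch of the public entry point documented by the eval() docstring above.
# The frame and column names are illustrative; the engine falls back to 'python' when numexpr
# is not installed, as _check_engine describes.
import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [10, 20, 30]})

# A plain arithmetic expression with the default 'pandas' parser
total = pd.eval("df.a + df.b")

# Assignment expressions need a target; DataFrame.eval supplies the frame as that target
df.eval("c = a + b", inplace=True)
print(total)
print(df)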
# -*- coding: utf-8 -*- import pytest import dateutil import pytz # noqa # a test below uses pytz but only inside a `eval` call import pprint from distutils.version import LooseVersion from pandas import Timestamp class TestTimestampRendering(object): # dateutil zone change (only matters for repr) if LooseVersion(dateutil.__version__) >= LooseVersion('2.6.0'): timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific'] else: timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/America/Los_Angeles'] @pytest.mark.parametrize('tz', timezones) @pytest.mark.parametrize('freq', ['D', 'M', 'S', 'N']) @pytest.mark.parametrize('date', ['2014-03-07', '2014-01-01 09:00', '2014-01-01 00:00:00.000000001']) def test_repr(self, date, freq, tz): # avoid to match with timezone name freq_repr = "'{0}'".format(freq) if tz.startswith('dateutil'): tz_repr = tz.replace('dateutil', '') else: tz_repr = tz date_only = Timestamp(date) assert date in repr(date_only) assert tz_repr not in repr(date_only) assert freq_repr not in repr(date_only) assert date_only == eval(repr(date_only)) date_tz = Timestamp(date, tz=tz) assert date in repr(date_tz) assert tz_repr in repr(date_tz) assert freq_repr not in repr(date_tz) assert date_tz == eval(repr(date_tz)) date_freq = Timestamp(date, freq=freq) assert date in repr(date_freq) assert tz_repr not in repr(date_freq) assert freq_repr in repr(date_freq) assert date_freq == eval(repr(date_freq)) date_tz_freq = Timestamp(date, tz=tz, freq=freq) assert date in repr(date_tz_freq) assert tz_repr in repr(date_tz_freq) assert freq_repr in repr(date_tz_freq) assert date_tz_freq == eval(repr(date_tz_freq)) def test_repr_utcoffset(self): # This can cause the tz field to be populated, but it's redundant to # include this information in the date-string. date_with_utc_offset = Timestamp('2014-03-13 00:00:00-0400', tz=None) assert '2014-03-13 00:00:00-0400' in repr(date_with_utc_offset) assert 'tzoffset' not in repr(date_with_utc_offset) assert 'pytz.FixedOffset(-240)' in repr(date_with_utc_offset) expr = repr(date_with_utc_offset).replace("'pytz.FixedOffset(-240)'", 'pytz.FixedOffset(-240)') assert date_with_utc_offset == eval(expr) def test_timestamp_repr_pre1900(self): # pre-1900 stamp = Timestamp('1850-01-01', tz='US/Eastern') repr(stamp) iso8601 = '1850-01-01 01:23:45.012345' stamp = Timestamp(iso8601, tz='US/Eastern') result = repr(stamp) assert iso8601 in result def test_pprint(self): # GH#12622 nested_obj = {'foo': 1, 'bar': [{'w': {'a': Timestamp('2011-01-01')}}] * 10} result = pprint.pformat(nested_obj, width=50) expected = r"""{'bar': [{'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}], 'foo': 1}""" assert result == expected
amolkahat/pandas
pandas/tests/scalar/timestamp/test_rendering.py
pandas/core/computation/eval.py
"""Global fixtures for depot tests""" import fauxfactory import pytest from wrapanapi import VmState from cfme.utils.conf import cfme_data from cfme.utils.log import logger from cfme.utils.net import find_pingable from cfme.utils.net import find_pingable_ipv6 from cfme.utils.net import pick_responding_ip from cfme.utils.virtual_machines import deploy_template from cfme.utils.wait import TimedOutError from cfme.utils.wait import wait_for FTP_PORT = 21 @pytest.fixture(scope="module") def depot_machine_ip(request, appliance): """ Deploy vm for depot test This fixture uses for deploy vm on provider from yaml and then receive it's ip After test run vm deletes from provider """ try: # use long-test name so it has a longer life before automatic cleanup data = cfme_data.log_db_operations vm = deploy_template( data.log_db_depot_template.provider, fauxfactory.gen_alphanumeric(26, start="long-test-depot-"), template_name=data.log_db_depot_template.template_name ) vm.ensure_state(VmState.RUNNING) except AttributeError: msg = 'Missing some yaml information necessary to deploy depot VM' logger.exception(msg) pytest.skip(msg) try: # TODO It would be better to use retry_connect here, but this requires changes to other # fixtures. found_ip = pick_responding_ip(lambda: vm.all_ips, FTP_PORT, 300, 5, 10) except TimedOutError: msg = 'Timed out waiting for reachable depot VM IP' logger.exception(msg) pytest.skip(msg) yield found_ip vm.cleanup() @pytest.fixture(scope="module") def depot_machine_ipv4_and_ipv6(request, appliance): """ Deploy vm for depot test This fixture is used for deploying a vm on a provider from the yamls and getting its ip (both ipv4 and ipv6) After test run vm deletes from provider """ try: # use long-test name so it has a longer life before automatic cleanup data = cfme_data.log_db_operations vm = deploy_template( data.log_db_depot_template.provider, f"long-test-depot-{fauxfactory.gen_alphanumeric()}", template_name=data.log_db_depot_template.template_name ) vm.ensure_state(VmState.RUNNING) except AttributeError: msg = 'Missing some yaml information necessary to deploy depot VM' logger.exception(msg) pytest.skip(msg) try: ipv4, _ = wait_for( find_pingable, func_args=[vm, False], fail_condition=None, delay=5, num_sec=300 ) ipv6, _ = wait_for( find_pingable_ipv6, func_args=[vm], fail_condition=None, delay=5, num_sec=300 ) except TimedOutError: msg = 'Timed out waiting for reachable depot VM IP' logger.exception(msg) pytest.skip(msg) yield ipv4, ipv6 vm.cleanup()
import fauxfactory import pytest from wait_for import wait_for from widgetastic.exceptions import RowNotFound from cfme import test_requirements from cfme.cloud.provider.openstack import OpenStackProvider from cfme.configure.configuration.region_settings import ReplicationGlobalAddView from cfme.configure.configuration.region_settings import ReplicationGlobalView from cfme.fixtures.cli import provider_app_crud from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.conf import credentials from cfme.utils.log import logger pytestmark = [test_requirements.replication, pytest.mark.long_running] def create_vm(provider, vm_name): collection = provider.appliance.provider_based_collection(provider) try: template_name = provider.data['templates']['full_template']['name'] except KeyError: pytest.skip(f'Unable to identify full_template for provider: {provider}') vm = collection.instantiate( vm_name, provider, template_name=template_name ) vm.create_on_provider(find_in_cfme=True, allow_skip="default") return vm def are_dicts_same(dict1, dict2): logger.info(f"Comparing two dictionaries\n dict1:{dict1}\n dict2:{dict2}") if set(dict1) != set(dict2): return False for key in dict1.keys(): if set(dict1[key]) != set(dict2[key]): return False return True @pytest.fixture def setup_replication(configured_appliance, unconfigured_appliance): """Configure global_app database with region number 99 and subscribe to remote_app.""" remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') global_app.set_pglogical_replication(replication_type=':global') global_app.add_pglogical_replication_subscription(remote_app.hostname) return configured_appliance, unconfigured_appliance @pytest.mark.provider([OpenStackProvider]) def test_replication_powertoggle(request, provider, setup_replication, small_template): """ power toggle from global to remote Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h testSteps: 1. Have a VM created in the provider in the Remote region subscribed to Global. 2. Turn the VM off using the Global appliance. 3. Turn the VM on using the Global appliance. expectedResults: 1. 2. VM state changes to off in the Remote and Global appliance. 3. VM state changes to on in the Remote and Global appliance. 
""" instance_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() remote_app, global_app = setup_replication provider.appliance = remote_app provider.setup() remote_instance = remote_app.collections.cloud_instances.instantiate( instance_name, provider, small_template.name ) global_instance = global_app.collections.cloud_instances.instantiate(instance_name, provider) # Create instance remote_instance.create_on_provider(find_in_cfme=True) request.addfinalizer(remote_instance.cleanup_on_provider) remote_instance.wait_for_instance_state_change(desired_state=remote_instance.STATE_ON) # Power OFF instance using global appliance global_instance.power_control_from_cfme(option=global_instance.STOP) # Assert instance power off state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_OFF ).out assert remote_instance.wait_for_instance_state_change( desired_state=remote_instance.STATE_OFF ).out # Power ON instance using global appliance global_instance.power_control_from_cfme(option=global_instance.START) # Assert instance power ON state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out assert remote_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out @pytest.mark.tier(2) def test_replication_appliance_add_single_subscription(setup_replication): """ Add one remote subscription to global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h startsin: 5.7 testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. expectedResults: 1. 2. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_re_add_deleted_remote(setup_replication): """ Re-add deleted remote region Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/12h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. 3. Add the Remote to Global again expectedResults: 1. 2. Subscription is successfully removed. 3. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global and make sure it is removed region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) # Add the Remote to Global again global_app.set_pglogical_replication(replication_type=":global") global_app.add_pglogical_replication_subscription(remote_app.hostname) # Assert the hostname is present view = region.replication.create_view(ReplicationGlobalView) view.browser.refresh() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_delete_remote_from_global(setup_replication): """ Delete remote subscription from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/5h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. expectedResults: 1. 2. No error. Appliance unsubscribed. 
""" remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(1) def test_replication_remote_to_global_by_ip_pglogical(setup_replication): """ Test replication from remote region to global using any data type (provider,event,etc) Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/4h startsin: 5.6 testSteps: 1. Have A Remote subscribed to Global. 2. Create a provider in remote region. 3. Check the provider appeared in the Global. expectedResults: 1. 2. 3. Provider appeared in the Global. """ remote_app, global_app = setup_replication provider = provider_app_crud(OpenStackProvider, remote_app) provider.setup() # Assert the provider is replicated to global appliance assert provider.name in global_app.managed_provider_names, "Provider name not found" @pytest.mark.tier(1) def test_replication_appliance_set_type_global_ui(configured_appliance, unconfigured_appliance): """ Set appliance replication type to "Global" and add subscription in the UI Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testtype: functional testSteps: 1. Have two appliances with same v2 keys and different regions 2. Set one as Global and the other as Remote and add subscribe the Remote to the Global expectedResults: 1. 2. No error, appliance subscribed. """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() # Making configured app to Remote Appliance using UI remote_region = remote_app.collections.regions.instantiate() remote_region.replication.set_replication(replication_type="remote") # Adding Remote Appliance into Global appliance using UI global_region = global_app.collections.regions.instantiate(number=99) global_region.replication.set_replication( replication_type="global", updates={"host": remote_app.hostname}, validate=True) # Validating replication assert global_region.replication.get_replication_status( host=remote_app.hostname), "Replication is not started." @pytest.mark.tier(2) @pytest.mark.parametrize("temp_appliances_unconfig_modscope_rhevm", [3], indirect=True) def test_replication_appliance_add_multi_subscription(request, setup_multi_region_cluster, multi_region_cluster, temp_appliances_unconfig_modscope_rhevm): """ add two or more subscriptions to global Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h startsin: 5.7 testSteps: 1. Have three appliances with same v2 keys and different regions 2. Set one as Global and the other two as Remote and add subscribe the Remotes to the Global expectedResults: 1. 2. appliances subscribed. 
""" region = multi_region_cluster.global_appliance.collections.regions.instantiate() navigate_to(region.replication, "Global") for host in multi_region_cluster.remote_appliances: assert region.replication.get_replication_status( host=host.hostname ), f"{host.hostname} Remote Appliance is not found in Global Appliance's list" @pytest.mark.tier(1) def test_replication_global_region_dashboard(request, setup_replication): """ Global dashboard show remote data Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h testSteps: 1. Have a VM created in the provider in the Remote region which is subscribed to Global. 2. Check the dashboard on the Global shows data from the Remote region. expectedResults: 1. 2. Dashboard on the Global displays data from the Remote region """ remote_app, global_app = setup_replication remote_provider = provider_app_crud(InfraProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_rep_dashboard", length=25).lower() vm = create_vm(provider=remote_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) data_items = ('EVM: Recently Discovered Hosts', 'EVM: Recently Discovered VMs', 'Top Storage Consumers') remote_app_data, global_app_data = {}, {} def get_table_data(widget): ret = [row.name.text for row in widget.contents] logger.info("Widget text data:{%s}" % ret) return ret def data_check(view, table): return bool(get_table_data(view.dashboards("Default Dashboard").widgets(table))) view = navigate_to(remote_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name} " ) remote_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) view = navigate_to(global_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name}" ) global_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) # TODO(ndhandre): Widget not implemented so some widget not checking in this test case they are # 'Vendor and Guest OS Chart', 'Top Memory Consumers (weekly)', 'Top CPU Consumers (weekly)', # 'Virtual Infrastructure Platforms', 'Guest OS Information' assert are_dicts_same(remote_app_data, global_app_data), "Dashboard is not same of both app." @pytest.mark.tier(1) def test_replication_global_to_remote_new_vm_from_template(request, setup_replication): """ Create a new VM from template in remote region from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. 3. Create a VM from template in Remote region using the Global appliance. expectedResults: 1. 2. 3. VM created in the Remote, no errors. 
""" remote_app, global_app = setup_replication remote_provider = provider_app_crud(RHEVMProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() global_provider = provider_app_crud(RHEVMProvider, global_app) vm = create_vm(provider=global_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) remote_provider.refresh_provider_relationships() assert (remote_app.collections.infra_vms.instantiate(new_vm_name, remote_provider).exists), ( f"{new_vm_name} vm is not found in Remote Appliance" ) @pytest.mark.tier(1) def test_replication_subscription_revalidation_pglogical(configured_appliance, unconfigured_appliance): """ Subscription validation passes for replication subscriptions which have been validated and successfully saved. Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: medium initialEstimate: 1/12h testSteps: 1. Attempt to validate the subscription expectedResults: 1. Validation succeeds as this subscription was successfully saved and is currently replicating """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') region = global_app.collections.regions.instantiate(number=99) region.replication.set_replication(replication_type="global", updates={"host": remote_app.hostname}, validate=True) @test_requirements.settings @test_requirements.multi_region @pytest.mark.tier(3) def test_replication_subscription_update(multi_region_cluster, setup_multi_region_cluster): """ Edit replication subscription Polarion: assignee: dgaikwad casecomponent: Configuration caseimportance: critical initialEstimate: 1/4h """ global_appliance = multi_region_cluster.global_appliance region = global_appliance.collections.regions.instantiate(number=99) # Update with bad password and verify that error flash message appears row = region.replication._global_replication_row() row[8].widget.click(handle_alert=True) view = region.replication.create_view(ReplicationGlobalAddView) view.fill({'username': 'bad_user'}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_message("FATAL: password authentication failed", partial=True, t='error') row[8].widget.click(handle_alert=True) view.fill({'username': credentials.database.username}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_success_message("Subscription Credentials validated successfully")
nachandr/cfme_tests
cfme/tests/test_replication.py
cfme/fixtures/depot.py
from widgetastic.widget import View from widgetastic_patternfly import AboutModal from cfme.exceptions import ItemNotFound from cfme.utils.appliance.implementations.ui import navigate_to # MIQ/CFME about field names VERSION = 'Version' SERVER = 'Server Name' USER = 'User Name' ROLE = 'User Role' BROWSER = 'Browser' BROWSER_VERSION = 'Browser Version' BROWSER_OS = 'Browser OS' ZONE = "Zone" REGION = "Region" class MIQAboutModal(AboutModal): """Override some locators that MIQ mangles""" CLOSE_LOC = './/div[@class="modal-header"]/button[@class="close"]' class AboutView(View): """ The view for the about modal """ @property def is_displayed(self): return self.modal.is_open modal = MIQAboutModal() # 5.10 has id, 5.11 does not, wt.pf doesn't need it. def get_detail(field, server): """ Open the about modal and fetch the value for one of the fields 'title' and 'trademark' fields are allowed and get the header/footer values Raises ItemNotFound if the field isn't in the about modal :param field: string label for the detail field :return: string value from the requested field """ view = navigate_to(server, 'About') try: if field.lower() in ['title', 'trademark']: return getattr(view.modal, field.lower()) else: # this is AboutModal.items function, TODO rename return view.modal.items()[field] except (KeyError, AttributeError): raise ItemNotFound(f'No field named {field} found in "About" modal.') finally: # close since its a blocking modal and will break further navigation view.modal.close()
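# A hypothetical use of the get_detail helper defined above. The `appliance` fixture is assumed
# to exist in the cfme test harness (the depot fixtures earlier in this section already accept
# it) and is not defined here; the test body is illustrative only.
from cfme.configure.about import VERSION, ZONE, get_detail


def test_about_modal_fields(appliance):
    version = get_detail(VERSION, appliance.server)
    zone = get_detail(ZONE, appliance.server)
    assert version and zone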
import fauxfactory import pytest from wait_for import wait_for from widgetastic.exceptions import RowNotFound from cfme import test_requirements from cfme.cloud.provider.openstack import OpenStackProvider from cfme.configure.configuration.region_settings import ReplicationGlobalAddView from cfme.configure.configuration.region_settings import ReplicationGlobalView from cfme.fixtures.cli import provider_app_crud from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.conf import credentials from cfme.utils.log import logger pytestmark = [test_requirements.replication, pytest.mark.long_running] def create_vm(provider, vm_name): collection = provider.appliance.provider_based_collection(provider) try: template_name = provider.data['templates']['full_template']['name'] except KeyError: pytest.skip(f'Unable to identify full_template for provider: {provider}') vm = collection.instantiate( vm_name, provider, template_name=template_name ) vm.create_on_provider(find_in_cfme=True, allow_skip="default") return vm def are_dicts_same(dict1, dict2): logger.info(f"Comparing two dictionaries\n dict1:{dict1}\n dict2:{dict2}") if set(dict1) != set(dict2): return False for key in dict1.keys(): if set(dict1[key]) != set(dict2[key]): return False return True @pytest.fixture def setup_replication(configured_appliance, unconfigured_appliance): """Configure global_app database with region number 99 and subscribe to remote_app.""" remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') global_app.set_pglogical_replication(replication_type=':global') global_app.add_pglogical_replication_subscription(remote_app.hostname) return configured_appliance, unconfigured_appliance @pytest.mark.provider([OpenStackProvider]) def test_replication_powertoggle(request, provider, setup_replication, small_template): """ power toggle from global to remote Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h testSteps: 1. Have a VM created in the provider in the Remote region subscribed to Global. 2. Turn the VM off using the Global appliance. 3. Turn the VM on using the Global appliance. expectedResults: 1. 2. VM state changes to off in the Remote and Global appliance. 3. VM state changes to on in the Remote and Global appliance. 
""" instance_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() remote_app, global_app = setup_replication provider.appliance = remote_app provider.setup() remote_instance = remote_app.collections.cloud_instances.instantiate( instance_name, provider, small_template.name ) global_instance = global_app.collections.cloud_instances.instantiate(instance_name, provider) # Create instance remote_instance.create_on_provider(find_in_cfme=True) request.addfinalizer(remote_instance.cleanup_on_provider) remote_instance.wait_for_instance_state_change(desired_state=remote_instance.STATE_ON) # Power OFF instance using global appliance global_instance.power_control_from_cfme(option=global_instance.STOP) # Assert instance power off state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_OFF ).out assert remote_instance.wait_for_instance_state_change( desired_state=remote_instance.STATE_OFF ).out # Power ON instance using global appliance global_instance.power_control_from_cfme(option=global_instance.START) # Assert instance power ON state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out assert remote_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out @pytest.mark.tier(2) def test_replication_appliance_add_single_subscription(setup_replication): """ Add one remote subscription to global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h startsin: 5.7 testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. expectedResults: 1. 2. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_re_add_deleted_remote(setup_replication): """ Re-add deleted remote region Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/12h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. 3. Add the Remote to Global again expectedResults: 1. 2. Subscription is successfully removed. 3. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global and make sure it is removed region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) # Add the Remote to Global again global_app.set_pglogical_replication(replication_type=":global") global_app.add_pglogical_replication_subscription(remote_app.hostname) # Assert the hostname is present view = region.replication.create_view(ReplicationGlobalView) view.browser.refresh() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_delete_remote_from_global(setup_replication): """ Delete remote subscription from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/5h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. expectedResults: 1. 2. No error. Appliance unsubscribed. 
""" remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(1) def test_replication_remote_to_global_by_ip_pglogical(setup_replication): """ Test replication from remote region to global using any data type (provider,event,etc) Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/4h startsin: 5.6 testSteps: 1. Have A Remote subscribed to Global. 2. Create a provider in remote region. 3. Check the provider appeared in the Global. expectedResults: 1. 2. 3. Provider appeared in the Global. """ remote_app, global_app = setup_replication provider = provider_app_crud(OpenStackProvider, remote_app) provider.setup() # Assert the provider is replicated to global appliance assert provider.name in global_app.managed_provider_names, "Provider name not found" @pytest.mark.tier(1) def test_replication_appliance_set_type_global_ui(configured_appliance, unconfigured_appliance): """ Set appliance replication type to "Global" and add subscription in the UI Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testtype: functional testSteps: 1. Have two appliances with same v2 keys and different regions 2. Set one as Global and the other as Remote and add subscribe the Remote to the Global expectedResults: 1. 2. No error, appliance subscribed. """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() # Making configured app to Remote Appliance using UI remote_region = remote_app.collections.regions.instantiate() remote_region.replication.set_replication(replication_type="remote") # Adding Remote Appliance into Global appliance using UI global_region = global_app.collections.regions.instantiate(number=99) global_region.replication.set_replication( replication_type="global", updates={"host": remote_app.hostname}, validate=True) # Validating replication assert global_region.replication.get_replication_status( host=remote_app.hostname), "Replication is not started." @pytest.mark.tier(2) @pytest.mark.parametrize("temp_appliances_unconfig_modscope_rhevm", [3], indirect=True) def test_replication_appliance_add_multi_subscription(request, setup_multi_region_cluster, multi_region_cluster, temp_appliances_unconfig_modscope_rhevm): """ add two or more subscriptions to global Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h startsin: 5.7 testSteps: 1. Have three appliances with same v2 keys and different regions 2. Set one as Global and the other two as Remote and add subscribe the Remotes to the Global expectedResults: 1. 2. appliances subscribed. 
""" region = multi_region_cluster.global_appliance.collections.regions.instantiate() navigate_to(region.replication, "Global") for host in multi_region_cluster.remote_appliances: assert region.replication.get_replication_status( host=host.hostname ), f"{host.hostname} Remote Appliance is not found in Global Appliance's list" @pytest.mark.tier(1) def test_replication_global_region_dashboard(request, setup_replication): """ Global dashboard show remote data Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h testSteps: 1. Have a VM created in the provider in the Remote region which is subscribed to Global. 2. Check the dashboard on the Global shows data from the Remote region. expectedResults: 1. 2. Dashboard on the Global displays data from the Remote region """ remote_app, global_app = setup_replication remote_provider = provider_app_crud(InfraProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_rep_dashboard", length=25).lower() vm = create_vm(provider=remote_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) data_items = ('EVM: Recently Discovered Hosts', 'EVM: Recently Discovered VMs', 'Top Storage Consumers') remote_app_data, global_app_data = {}, {} def get_table_data(widget): ret = [row.name.text for row in widget.contents] logger.info("Widget text data:{%s}" % ret) return ret def data_check(view, table): return bool(get_table_data(view.dashboards("Default Dashboard").widgets(table))) view = navigate_to(remote_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name} " ) remote_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) view = navigate_to(global_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name}" ) global_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) # TODO(ndhandre): Widget not implemented so some widget not checking in this test case they are # 'Vendor and Guest OS Chart', 'Top Memory Consumers (weekly)', 'Top CPU Consumers (weekly)', # 'Virtual Infrastructure Platforms', 'Guest OS Information' assert are_dicts_same(remote_app_data, global_app_data), "Dashboard is not same of both app." @pytest.mark.tier(1) def test_replication_global_to_remote_new_vm_from_template(request, setup_replication): """ Create a new VM from template in remote region from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. 3. Create a VM from template in Remote region using the Global appliance. expectedResults: 1. 2. 3. VM created in the Remote, no errors. 
""" remote_app, global_app = setup_replication remote_provider = provider_app_crud(RHEVMProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() global_provider = provider_app_crud(RHEVMProvider, global_app) vm = create_vm(provider=global_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) remote_provider.refresh_provider_relationships() assert (remote_app.collections.infra_vms.instantiate(new_vm_name, remote_provider).exists), ( f"{new_vm_name} vm is not found in Remote Appliance" ) @pytest.mark.tier(1) def test_replication_subscription_revalidation_pglogical(configured_appliance, unconfigured_appliance): """ Subscription validation passes for replication subscriptions which have been validated and successfully saved. Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: medium initialEstimate: 1/12h testSteps: 1. Attempt to validate the subscription expectedResults: 1. Validation succeeds as this subscription was successfully saved and is currently replicating """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') region = global_app.collections.regions.instantiate(number=99) region.replication.set_replication(replication_type="global", updates={"host": remote_app.hostname}, validate=True) @test_requirements.settings @test_requirements.multi_region @pytest.mark.tier(3) def test_replication_subscription_update(multi_region_cluster, setup_multi_region_cluster): """ Edit replication subscription Polarion: assignee: dgaikwad casecomponent: Configuration caseimportance: critical initialEstimate: 1/4h """ global_appliance = multi_region_cluster.global_appliance region = global_appliance.collections.regions.instantiate(number=99) # Update with bad password and verify that error flash message appears row = region.replication._global_replication_row() row[8].widget.click(handle_alert=True) view = region.replication.create_view(ReplicationGlobalAddView) view.fill({'username': 'bad_user'}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_message("FATAL: password authentication failed", partial=True, t='error') row[8].widget.click(handle_alert=True) view.fill({'username': credentials.database.username}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_success_message("Subscription Credentials validated successfully")
nachandr/cfme_tests
cfme/tests/test_replication.py
cfme/configure/about.py
import attr from widgetastic.exceptions import NoSuchElementException from wrapanapi.systems import VMWareSystem from cfme.common.candu_views import VMUtilizationView from cfme.common.provider import DefaultEndpoint from cfme.common.provider import DefaultEndpointForm from cfme.common.provider import VMRCEndpoint from cfme.exceptions import ItemNotFound from cfme.infrastructure.provider import InfraProvider from cfme.services.catalogs.catalog_items import VMwareCatalogItem from widgetastic_manageiq import LineChart class VirtualCenterEndpoint(DefaultEndpoint): pass class VirtualCenterEndpointForm(DefaultEndpointForm): pass class VirtualCenterVMUtilizationView(VMUtilizationView): """A VM Utilization view for virtual center providers""" vm_cpu = LineChart(id='miq_chart_parent_candu_0') vm_cpu_state = LineChart(id='miq_chart_parent_candu_1') vm_memory = LineChart(id='miq_chart_parent_candu_2') vm_disk = LineChart(id='miq_chart_parent_candu_3') vm_network = LineChart(id='miq_chart_parent_candu_4') @attr.s(eq=False) class VMwareProvider(InfraProvider): catalog_item_type = VMwareCatalogItem vm_utilization_view = VirtualCenterVMUtilizationView type_name = "virtualcenter" mgmt_class = VMWareSystem db_types = ["Vmware::InfraManager"] endpoints_form = VirtualCenterEndpointForm ems_pretty_name = 'VMware vCenter' discover_dict = {"vmware": True} settings_key = 'ems_vmware' # xpath locators for elements, to be used by selenium _console_connection_status_element = '//*[@id="connection-status"]|//*[@id="noVNC_status"]' _canvas_element = ('(//*[@id="remote-console" or @id="wmksContainer"]/canvas|' '//*[@id="noVNC_canvas"])') _ctrl_alt_del_xpath = '(//*[@id="ctrlaltdel"]|//*[@id="sendCtrlAltDelButton"])' _fullscreen_xpath = '//*[@id="fullscreen"]' bad_credentials_error_msg = 'Cannot complete login due to an incorrect user name or password.' log_name = 'vim' _console_type = '//*[@id="console-type"]' ems_events = [ ('vm_create', {'event_type': 'VmDeployedEvent', 'dest_vm_or_template_id': None}), ('vm_stop', {'event_type': 'VmPoweredOffEvent', 'vm_or_template_id': None}), ('vm_start', {'event_type': 'VmPoweredOnEvent', 'vm_or_template_id': None}), ('vm_delete', {'event_type': 'VmRemovedEvent', 'vm_or_template_id': None}) ] def deployment_helper(self, deploy_args): """ Used in utils.virtual_machines """ # Called within a dictionary update. 
Since we want to remove key/value pairs, return the # entire dictionary deploy_args.pop('username', None) deploy_args.pop('password', None) if "allowed_datastores" not in deploy_args and "allowed_datastores" in self.data: deploy_args['allowed_datastores'] = self.data['allowed_datastores'] return deploy_args @classmethod def from_config(cls, prov_config, prov_key, appliance=None): appliance = appliance or cls.appliance endpoints = { VirtualCenterEndpoint.name: VirtualCenterEndpoint(**prov_config['endpoints']['default']) } vmrc_endpoint_config = prov_config["endpoints"].get(VMRCEndpoint.name, {}) if vmrc_endpoint_config: endpoints[VMRCEndpoint.name] = VMRCEndpoint(**vmrc_endpoint_config) if prov_config.get('discovery_range'): start_ip = prov_config['discovery_range']['start'] end_ip = prov_config['discovery_range']['end'] else: start_ip = end_ip = prov_config.get('ipaddress') return appliance.collections.infra_providers.instantiate( prov_class=cls, name=prov_config['name'], endpoints=endpoints, zone=prov_config['server_zone'], key=prov_key, start_ip=start_ip, end_ip=end_ip) @property def view_value_mapping(self): return dict(name=self.name, prov_type='VMware vCenter') # Following methods will only work if the remote console window is open # and if selenium focused on it. These will not work if the selenium is # focused on Appliance window. def _try_element_lookup(self, xpath): try: return self.appliance.browser.widgetastic.selenium.find_element_by_xpath(xpath) except NoSuchElementException: raise ItemNotFound("Element not found on screen, is current focus on console window?") def get_console_connection_status(self): return self._try_element_lookup(self._console_connection_status_element).text def get_remote_console_canvas(self): return self._try_element_lookup(self._canvas_element) def get_console_ctrl_alt_del_btn(self): return self._try_element_lookup(self._ctrl_alt_del_xpath) def get_console_fullscreen_btn(self): return self._try_element_lookup(self._fullscreen_xpath) def get_console_type_name(self): return self._try_element_lookup(self._console_type).text
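deployment_helper above never forwards credentials to the deployment call and only injects allowed_datastores from the provider YAML when the caller did not set it. A standalone sketch of that contract with illustrative values; the function below mirrors the method but takes the provider data as a plain dict so it can run on its own:

def deployment_helper(provider_data, deploy_args):
    # Mirrors VMwareProvider.deployment_helper: strip credentials, then fall back to the
    # provider-level allowed_datastores if the caller did not supply one.
    deploy_args.pop('username', None)
    deploy_args.pop('password', None)
    if 'allowed_datastores' not in deploy_args and 'allowed_datastores' in provider_data:
        deploy_args['allowed_datastores'] = provider_data['allowed_datastores']
    return deploy_args

assert deployment_helper(
    {'allowed_datastores': ['iscsi-ds-1']},                       # illustrative provider data
    {'vm_name': 'test_vm', 'username': 'root', 'password': 'x'},  # illustrative deploy args
) == {'vm_name': 'test_vm', 'allowed_datastores': ['iscsi-ds-1']}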
import fauxfactory import pytest from wait_for import wait_for from widgetastic.exceptions import RowNotFound from cfme import test_requirements from cfme.cloud.provider.openstack import OpenStackProvider from cfme.configure.configuration.region_settings import ReplicationGlobalAddView from cfme.configure.configuration.region_settings import ReplicationGlobalView from cfme.fixtures.cli import provider_app_crud from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.conf import credentials from cfme.utils.log import logger pytestmark = [test_requirements.replication, pytest.mark.long_running] def create_vm(provider, vm_name): collection = provider.appliance.provider_based_collection(provider) try: template_name = provider.data['templates']['full_template']['name'] except KeyError: pytest.skip(f'Unable to identify full_template for provider: {provider}') vm = collection.instantiate( vm_name, provider, template_name=template_name ) vm.create_on_provider(find_in_cfme=True, allow_skip="default") return vm def are_dicts_same(dict1, dict2): logger.info(f"Comparing two dictionaries\n dict1:{dict1}\n dict2:{dict2}") if set(dict1) != set(dict2): return False for key in dict1.keys(): if set(dict1[key]) != set(dict2[key]): return False return True @pytest.fixture def setup_replication(configured_appliance, unconfigured_appliance): """Configure global_app database with region number 99 and subscribe to remote_app.""" remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') global_app.set_pglogical_replication(replication_type=':global') global_app.add_pglogical_replication_subscription(remote_app.hostname) return configured_appliance, unconfigured_appliance @pytest.mark.provider([OpenStackProvider]) def test_replication_powertoggle(request, provider, setup_replication, small_template): """ power toggle from global to remote Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h testSteps: 1. Have a VM created in the provider in the Remote region subscribed to Global. 2. Turn the VM off using the Global appliance. 3. Turn the VM on using the Global appliance. expectedResults: 1. 2. VM state changes to off in the Remote and Global appliance. 3. VM state changes to on in the Remote and Global appliance. 
""" instance_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() remote_app, global_app = setup_replication provider.appliance = remote_app provider.setup() remote_instance = remote_app.collections.cloud_instances.instantiate( instance_name, provider, small_template.name ) global_instance = global_app.collections.cloud_instances.instantiate(instance_name, provider) # Create instance remote_instance.create_on_provider(find_in_cfme=True) request.addfinalizer(remote_instance.cleanup_on_provider) remote_instance.wait_for_instance_state_change(desired_state=remote_instance.STATE_ON) # Power OFF instance using global appliance global_instance.power_control_from_cfme(option=global_instance.STOP) # Assert instance power off state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_OFF ).out assert remote_instance.wait_for_instance_state_change( desired_state=remote_instance.STATE_OFF ).out # Power ON instance using global appliance global_instance.power_control_from_cfme(option=global_instance.START) # Assert instance power ON state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out assert remote_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out @pytest.mark.tier(2) def test_replication_appliance_add_single_subscription(setup_replication): """ Add one remote subscription to global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h startsin: 5.7 testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. expectedResults: 1. 2. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_re_add_deleted_remote(setup_replication): """ Re-add deleted remote region Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/12h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. 3. Add the Remote to Global again expectedResults: 1. 2. Subscription is successfully removed. 3. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global and make sure it is removed region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) # Add the Remote to Global again global_app.set_pglogical_replication(replication_type=":global") global_app.add_pglogical_replication_subscription(remote_app.hostname) # Assert the hostname is present view = region.replication.create_view(ReplicationGlobalView) view.browser.refresh() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_delete_remote_from_global(setup_replication): """ Delete remote subscription from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/5h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. expectedResults: 1. 2. No error. Appliance unsubscribed. 
""" remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(1) def test_replication_remote_to_global_by_ip_pglogical(setup_replication): """ Test replication from remote region to global using any data type (provider,event,etc) Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/4h startsin: 5.6 testSteps: 1. Have A Remote subscribed to Global. 2. Create a provider in remote region. 3. Check the provider appeared in the Global. expectedResults: 1. 2. 3. Provider appeared in the Global. """ remote_app, global_app = setup_replication provider = provider_app_crud(OpenStackProvider, remote_app) provider.setup() # Assert the provider is replicated to global appliance assert provider.name in global_app.managed_provider_names, "Provider name not found" @pytest.mark.tier(1) def test_replication_appliance_set_type_global_ui(configured_appliance, unconfigured_appliance): """ Set appliance replication type to "Global" and add subscription in the UI Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testtype: functional testSteps: 1. Have two appliances with same v2 keys and different regions 2. Set one as Global and the other as Remote and add subscribe the Remote to the Global expectedResults: 1. 2. No error, appliance subscribed. """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() # Making configured app to Remote Appliance using UI remote_region = remote_app.collections.regions.instantiate() remote_region.replication.set_replication(replication_type="remote") # Adding Remote Appliance into Global appliance using UI global_region = global_app.collections.regions.instantiate(number=99) global_region.replication.set_replication( replication_type="global", updates={"host": remote_app.hostname}, validate=True) # Validating replication assert global_region.replication.get_replication_status( host=remote_app.hostname), "Replication is not started." @pytest.mark.tier(2) @pytest.mark.parametrize("temp_appliances_unconfig_modscope_rhevm", [3], indirect=True) def test_replication_appliance_add_multi_subscription(request, setup_multi_region_cluster, multi_region_cluster, temp_appliances_unconfig_modscope_rhevm): """ add two or more subscriptions to global Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h startsin: 5.7 testSteps: 1. Have three appliances with same v2 keys and different regions 2. Set one as Global and the other two as Remote and add subscribe the Remotes to the Global expectedResults: 1. 2. appliances subscribed. 
""" region = multi_region_cluster.global_appliance.collections.regions.instantiate() navigate_to(region.replication, "Global") for host in multi_region_cluster.remote_appliances: assert region.replication.get_replication_status( host=host.hostname ), f"{host.hostname} Remote Appliance is not found in Global Appliance's list" @pytest.mark.tier(1) def test_replication_global_region_dashboard(request, setup_replication): """ Global dashboard show remote data Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h testSteps: 1. Have a VM created in the provider in the Remote region which is subscribed to Global. 2. Check the dashboard on the Global shows data from the Remote region. expectedResults: 1. 2. Dashboard on the Global displays data from the Remote region """ remote_app, global_app = setup_replication remote_provider = provider_app_crud(InfraProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_rep_dashboard", length=25).lower() vm = create_vm(provider=remote_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) data_items = ('EVM: Recently Discovered Hosts', 'EVM: Recently Discovered VMs', 'Top Storage Consumers') remote_app_data, global_app_data = {}, {} def get_table_data(widget): ret = [row.name.text for row in widget.contents] logger.info("Widget text data:{%s}" % ret) return ret def data_check(view, table): return bool(get_table_data(view.dashboards("Default Dashboard").widgets(table))) view = navigate_to(remote_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name} " ) remote_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) view = navigate_to(global_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name}" ) global_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) # TODO(ndhandre): Widget not implemented so some widget not checking in this test case they are # 'Vendor and Guest OS Chart', 'Top Memory Consumers (weekly)', 'Top CPU Consumers (weekly)', # 'Virtual Infrastructure Platforms', 'Guest OS Information' assert are_dicts_same(remote_app_data, global_app_data), "Dashboard is not same of both app." @pytest.mark.tier(1) def test_replication_global_to_remote_new_vm_from_template(request, setup_replication): """ Create a new VM from template in remote region from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. 3. Create a VM from template in Remote region using the Global appliance. expectedResults: 1. 2. 3. VM created in the Remote, no errors. 
""" remote_app, global_app = setup_replication remote_provider = provider_app_crud(RHEVMProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() global_provider = provider_app_crud(RHEVMProvider, global_app) vm = create_vm(provider=global_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) remote_provider.refresh_provider_relationships() assert (remote_app.collections.infra_vms.instantiate(new_vm_name, remote_provider).exists), ( f"{new_vm_name} vm is not found in Remote Appliance" ) @pytest.mark.tier(1) def test_replication_subscription_revalidation_pglogical(configured_appliance, unconfigured_appliance): """ Subscription validation passes for replication subscriptions which have been validated and successfully saved. Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: medium initialEstimate: 1/12h testSteps: 1. Attempt to validate the subscription expectedResults: 1. Validation succeeds as this subscription was successfully saved and is currently replicating """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') region = global_app.collections.regions.instantiate(number=99) region.replication.set_replication(replication_type="global", updates={"host": remote_app.hostname}, validate=True) @test_requirements.settings @test_requirements.multi_region @pytest.mark.tier(3) def test_replication_subscription_update(multi_region_cluster, setup_multi_region_cluster): """ Edit replication subscription Polarion: assignee: dgaikwad casecomponent: Configuration caseimportance: critical initialEstimate: 1/4h """ global_appliance = multi_region_cluster.global_appliance region = global_appliance.collections.regions.instantiate(number=99) # Update with bad password and verify that error flash message appears row = region.replication._global_replication_row() row[8].widget.click(handle_alert=True) view = region.replication.create_view(ReplicationGlobalAddView) view.fill({'username': 'bad_user'}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_message("FATAL: password authentication failed", partial=True, t='error') row[8].widget.click(handle_alert=True) view.fill({'username': credentials.database.username}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_success_message("Subscription Credentials validated successfully")
nachandr/cfme_tests
cfme/tests/test_replication.py
cfme/infrastructure/provider/virtualcenter.py
import attr from cfme.utils.appliance.plugin import AppliancePlugin from cfme.utils.appliance.plugin import AppliancePluginException from cfme.utils.log import logger_wrap from cfme.utils.quote import quote from cfme.utils.wait import wait_for class SystemdException(AppliancePluginException): pass @attr.s class SystemdService(AppliancePlugin): unit_name = attr.ib(type=str) @logger_wrap('SystemdService command runner: {}') def _run_service_command( self, command, expected_exit_code=None, unit_name=None, log_callback=None ): """Wrapper around running the command and raising exception on unexpected code Args: command: string command for systemd (stop, start, restart, etc) expected_exit_code: the exit code to expect, otherwise raise unit_name: optional unit name, defaults to self.unit_name attribute log_callback: logger to log against Raises: SystemdException: When expected_exit_code is not matched """ unit = self.unit_name if unit_name is None else unit_name with self.appliance.ssh_client as ssh: cmd = 'systemctl {} {}'.format(quote(command), quote(unit)) log_callback(f'Running {cmd}') result = ssh.run_command(cmd, container=self.appliance.ansible_pod_name) if expected_exit_code is not None and result.rc != expected_exit_code: # TODO: Bring back address msg = 'Failed to {} {}\nError: {}'.format( command, self.unit_name, result.output) if log_callback: log_callback(msg) else: self.logger.error(msg) raise SystemdException(msg) return result def stop(self, log_callback=None): return self._run_service_command( 'stop', expected_exit_code=0, log_callback=log_callback ) def start(self, log_callback=None): return self._run_service_command( 'start', expected_exit_code=0, log_callback=log_callback ) def restart(self, log_callback=None): return self._run_service_command( 'restart', expected_exit_code=0, log_callback=log_callback ) def reload(self, log_callback=None): return self._run_service_command( 'reload', expected_exit_code=0, log_callback=log_callback ) def enable(self, log_callback=None): return self._run_service_command( 'enable', expected_exit_code=0, log_callback=log_callback ) @property def enabled(self): return self._run_service_command('is-enabled').rc == 0 @property def is_active(self): return self._run_service_command('is-active').rc == 0 @property def running(self): return self._run_service_command("status").rc == 0 def wait_for_running(self, timeout=600): result, wait = wait_for( lambda: self.running, num_sec=timeout, fail_condition=False, delay=5, ) return result def daemon_reload(self, log_callback=None): """Call daemon-reload, no unit name for this""" return self._run_service_command( command='daemon-reload', expected_exit_code=0, unit_name='', log_callback=log_callback )
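SystemdService is the plugin behind calls such as global_app.evmserverd.wait_for_running() in the replication tests above: every method is a thin wrapper around `systemctl <command> <unit>` run over SSH. A short usage sketch, assuming an appliance object that already exposes the plugin as appliance.evmserverd (that attribute name is taken from the tests; the methods are the ones defined in this class):

svc = appliance.evmserverd            # SystemdService bound to the 'evmserverd' unit
svc.restart()                         # runs `systemctl restart evmserverd`; raises SystemdException on a non-zero rc
if not svc.enabled:                   # `systemctl is-enabled evmserverd`
    svc.enable()
svc.wait_for_running(timeout=600)     # polls `systemctl status evmserverd` until it returns rc == 0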
import fauxfactory import pytest from wait_for import wait_for from widgetastic.exceptions import RowNotFound from cfme import test_requirements from cfme.cloud.provider.openstack import OpenStackProvider from cfme.configure.configuration.region_settings import ReplicationGlobalAddView from cfme.configure.configuration.region_settings import ReplicationGlobalView from cfme.fixtures.cli import provider_app_crud from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.conf import credentials from cfme.utils.log import logger pytestmark = [test_requirements.replication, pytest.mark.long_running] def create_vm(provider, vm_name): collection = provider.appliance.provider_based_collection(provider) try: template_name = provider.data['templates']['full_template']['name'] except KeyError: pytest.skip(f'Unable to identify full_template for provider: {provider}') vm = collection.instantiate( vm_name, provider, template_name=template_name ) vm.create_on_provider(find_in_cfme=True, allow_skip="default") return vm def are_dicts_same(dict1, dict2): logger.info(f"Comparing two dictionaries\n dict1:{dict1}\n dict2:{dict2}") if set(dict1) != set(dict2): return False for key in dict1.keys(): if set(dict1[key]) != set(dict2[key]): return False return True @pytest.fixture def setup_replication(configured_appliance, unconfigured_appliance): """Configure global_app database with region number 99 and subscribe to remote_app.""" remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') global_app.set_pglogical_replication(replication_type=':global') global_app.add_pglogical_replication_subscription(remote_app.hostname) return configured_appliance, unconfigured_appliance @pytest.mark.provider([OpenStackProvider]) def test_replication_powertoggle(request, provider, setup_replication, small_template): """ power toggle from global to remote Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h testSteps: 1. Have a VM created in the provider in the Remote region subscribed to Global. 2. Turn the VM off using the Global appliance. 3. Turn the VM on using the Global appliance. expectedResults: 1. 2. VM state changes to off in the Remote and Global appliance. 3. VM state changes to on in the Remote and Global appliance. 
""" instance_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() remote_app, global_app = setup_replication provider.appliance = remote_app provider.setup() remote_instance = remote_app.collections.cloud_instances.instantiate( instance_name, provider, small_template.name ) global_instance = global_app.collections.cloud_instances.instantiate(instance_name, provider) # Create instance remote_instance.create_on_provider(find_in_cfme=True) request.addfinalizer(remote_instance.cleanup_on_provider) remote_instance.wait_for_instance_state_change(desired_state=remote_instance.STATE_ON) # Power OFF instance using global appliance global_instance.power_control_from_cfme(option=global_instance.STOP) # Assert instance power off state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_OFF ).out assert remote_instance.wait_for_instance_state_change( desired_state=remote_instance.STATE_OFF ).out # Power ON instance using global appliance global_instance.power_control_from_cfme(option=global_instance.START) # Assert instance power ON state from both remote and global appliance assert global_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out assert remote_instance.wait_for_instance_state_change( desired_state=global_instance.STATE_ON ).out @pytest.mark.tier(2) def test_replication_appliance_add_single_subscription(setup_replication): """ Add one remote subscription to global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/12h startsin: 5.7 testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. expectedResults: 1. 2. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_re_add_deleted_remote(setup_replication): """ Re-add deleted remote region Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/12h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. 3. Add the Remote to Global again expectedResults: 1. 2. Subscription is successfully removed. 3. No error. Appliance subscribed. """ remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global and make sure it is removed region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) # Add the Remote to Global again global_app.set_pglogical_replication(replication_type=":global") global_app.add_pglogical_replication_subscription(remote_app.hostname) # Assert the hostname is present view = region.replication.create_view(ReplicationGlobalView) view.browser.refresh() assert region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(3) def test_replication_delete_remote_from_global(setup_replication): """ Delete remote subscription from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/5h testSteps: 1. Have A Remote subscribed to Global. 2. Remove the Remote subscription from Global. expectedResults: 1. 2. No error. Appliance unsubscribed. 
""" remote_app, global_app = setup_replication region = global_app.collections.regions.instantiate() # Remove the Remote subscription from Global region.replication.remove_global_appliance(host=remote_app.hostname) with pytest.raises(RowNotFound): region.replication.get_replication_status(host=remote_app.hostname) @pytest.mark.tier(1) def test_replication_remote_to_global_by_ip_pglogical(setup_replication): """ Test replication from remote region to global using any data type (provider,event,etc) Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/4h startsin: 5.6 testSteps: 1. Have A Remote subscribed to Global. 2. Create a provider in remote region. 3. Check the provider appeared in the Global. expectedResults: 1. 2. 3. Provider appeared in the Global. """ remote_app, global_app = setup_replication provider = provider_app_crud(OpenStackProvider, remote_app) provider.setup() # Assert the provider is replicated to global appliance assert provider.name in global_app.managed_provider_names, "Provider name not found" @pytest.mark.tier(1) def test_replication_appliance_set_type_global_ui(configured_appliance, unconfigured_appliance): """ Set appliance replication type to "Global" and add subscription in the UI Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testtype: functional testSteps: 1. Have two appliances with same v2 keys and different regions 2. Set one as Global and the other as Remote and add subscribe the Remote to the Global expectedResults: 1. 2. No error, appliance subscribed. """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() # Making configured app to Remote Appliance using UI remote_region = remote_app.collections.regions.instantiate() remote_region.replication.set_replication(replication_type="remote") # Adding Remote Appliance into Global appliance using UI global_region = global_app.collections.regions.instantiate(number=99) global_region.replication.set_replication( replication_type="global", updates={"host": remote_app.hostname}, validate=True) # Validating replication assert global_region.replication.get_replication_status( host=remote_app.hostname), "Replication is not started." @pytest.mark.tier(2) @pytest.mark.parametrize("temp_appliances_unconfig_modscope_rhevm", [3], indirect=True) def test_replication_appliance_add_multi_subscription(request, setup_multi_region_cluster, multi_region_cluster, temp_appliances_unconfig_modscope_rhevm): """ add two or more subscriptions to global Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h startsin: 5.7 testSteps: 1. Have three appliances with same v2 keys and different regions 2. Set one as Global and the other two as Remote and add subscribe the Remotes to the Global expectedResults: 1. 2. appliances subscribed. 
""" region = multi_region_cluster.global_appliance.collections.regions.instantiate() navigate_to(region.replication, "Global") for host in multi_region_cluster.remote_appliances: assert region.replication.get_replication_status( host=host.hostname ), f"{host.hostname} Remote Appliance is not found in Global Appliance's list" @pytest.mark.tier(1) def test_replication_global_region_dashboard(request, setup_replication): """ Global dashboard show remote data Polarion: assignee: dgaikwad casecomponent: Replication initialEstimate: 1/4h testSteps: 1. Have a VM created in the provider in the Remote region which is subscribed to Global. 2. Check the dashboard on the Global shows data from the Remote region. expectedResults: 1. 2. Dashboard on the Global displays data from the Remote region """ remote_app, global_app = setup_replication remote_provider = provider_app_crud(InfraProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_rep_dashboard", length=25).lower() vm = create_vm(provider=remote_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) data_items = ('EVM: Recently Discovered Hosts', 'EVM: Recently Discovered VMs', 'Top Storage Consumers') remote_app_data, global_app_data = {}, {} def get_table_data(widget): ret = [row.name.text for row in widget.contents] logger.info("Widget text data:{%s}" % ret) return ret def data_check(view, table): return bool(get_table_data(view.dashboards("Default Dashboard").widgets(table))) view = navigate_to(remote_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name} " ) remote_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) view = navigate_to(global_app.server, "Dashboard") for table_name in data_items: logger.info("Table name:{%s}" % table_name) wait_for( data_check, func_args=[view, table_name], delay=20, num_sec=600, fail_func=view.dashboards("Default Dashboard").browser.refresh, message=f"Waiting for table data item: {table_name}" ) global_app_data[table_name] = get_table_data(view.dashboards( "Default Dashboard").widgets(table_name)) # TODO(ndhandre): Widget not implemented so some widget not checking in this test case they are # 'Vendor and Guest OS Chart', 'Top Memory Consumers (weekly)', 'Top CPU Consumers (weekly)', # 'Virtual Infrastructure Platforms', 'Guest OS Information' assert are_dicts_same(remote_app_data, global_app_data), "Dashboard is not same of both app." @pytest.mark.tier(1) def test_replication_global_to_remote_new_vm_from_template(request, setup_replication): """ Create a new VM from template in remote region from global region Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: critical initialEstimate: 1/6h testSteps: 1. Configure first appliance as Global. 2. Configure second appliance as Remote, subscribed to Global. 3. Create a VM from template in Remote region using the Global appliance. expectedResults: 1. 2. 3. VM created in the Remote, no errors. 
""" remote_app, global_app = setup_replication remote_provider = provider_app_crud(RHEVMProvider, remote_app) remote_provider.setup() assert remote_provider.name in remote_app.managed_provider_names, "Provider is not available." new_vm_name = fauxfactory.gen_alphanumeric(start="test_replication_", length=25).lower() global_provider = provider_app_crud(RHEVMProvider, global_app) vm = create_vm(provider=global_provider, vm_name=new_vm_name) request.addfinalizer(vm.cleanup_on_provider) remote_provider.refresh_provider_relationships() assert (remote_app.collections.infra_vms.instantiate(new_vm_name, remote_provider).exists), ( f"{new_vm_name} vm is not found in Remote Appliance" ) @pytest.mark.tier(1) def test_replication_subscription_revalidation_pglogical(configured_appliance, unconfigured_appliance): """ Subscription validation passes for replication subscriptions which have been validated and successfully saved. Polarion: assignee: dgaikwad casecomponent: Replication caseimportance: medium initialEstimate: 1/12h testSteps: 1. Attempt to validate the subscription expectedResults: 1. Validation succeeds as this subscription was successfully saved and is currently replicating """ remote_app, global_app = configured_appliance, unconfigured_appliance app_params = dict( region=99, dbhostname='localhost', username=credentials["database"]["username"], password=credentials["database"]["password"], dbname='vmdb_production', dbdisk=global_app.unpartitioned_disks[0], fetch_key=remote_app.hostname, sshlogin=credentials["ssh"]["username"], sshpass=credentials["ssh"]["password"], ) global_app.appliance_console_cli.configure_appliance_internal_fetch_key(**app_params) global_app.evmserverd.wait_for_running() global_app.wait_for_miq_ready() remote_app.set_pglogical_replication(replication_type=':remote') region = global_app.collections.regions.instantiate(number=99) region.replication.set_replication(replication_type="global", updates={"host": remote_app.hostname}, validate=True) @test_requirements.settings @test_requirements.multi_region @pytest.mark.tier(3) def test_replication_subscription_update(multi_region_cluster, setup_multi_region_cluster): """ Edit replication subscription Polarion: assignee: dgaikwad casecomponent: Configuration caseimportance: critical initialEstimate: 1/4h """ global_appliance = multi_region_cluster.global_appliance region = global_appliance.collections.regions.instantiate(number=99) # Update with bad password and verify that error flash message appears row = region.replication._global_replication_row() row[8].widget.click(handle_alert=True) view = region.replication.create_view(ReplicationGlobalAddView) view.fill({'username': 'bad_user'}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_message("FATAL: password authentication failed", partial=True, t='error') row[8].widget.click(handle_alert=True) view.fill({'username': credentials.database.username}) view.accept_button.click() view.action_dropdown.item_select('Validate') view.flash.assert_success_message("Subscription Credentials validated successfully")
nachandr/cfme_tests
cfme/tests/test_replication.py
cfme/utils/appliance/services.py
""" Docstrings for generated ufuncs The syntax is designed to look like the function add_newdoc is being called from numpy.lib, but in this file add_newdoc puts the docstrings in a dictionary. This dictionary is used in numpy/core/code_generators/generate_umath.py to generate the docstrings for the ufuncs in numpy.core at the C level when the ufuncs are created at compile time. """ import textwrap docdict = {} def get(name): return docdict.get(name) # common parameter text to all ufuncs subst = { 'PARAMS': textwrap.dedent(""" out : ndarray, None, or tuple of ndarray and None, optional A location into which the result is stored. If provided, it must have a shape that the inputs broadcast to. If not provided or None, a freshly-allocated array is returned. A tuple (possible only as a keyword argument) must have length equal to the number of outputs. where : array_like, optional This condition is broadcast over the input. At locations where the condition is True, the `out` array will be set to the ufunc result. Elsewhere, the `out` array will retain its original value. Note that if an uninitialized `out` array is created via the default ``out=None``, locations within it where the condition is False will remain uninitialized. **kwargs For other keyword-only arguments, see the :ref:`ufunc docs <ufuncs.kwargs>`. """).strip(), 'BROADCASTABLE_2': ("If ``x1.shape != x2.shape``, they must be " "broadcastable to a common\n shape (which becomes " "the shape of the output)."), 'OUT_SCALAR_1': "This is a scalar if `x` is a scalar.", 'OUT_SCALAR_2': "This is a scalar if both `x1` and `x2` are scalars.", } def add_newdoc(place, name, doc): doc = textwrap.dedent(doc).strip() skip = ( # gufuncs do not use the OUT_SCALAR replacement strings 'matmul', # clip has 3 inputs, which is not handled by this 'clip', ) if name[0] != '_' and name not in skip: if '\nx :' in doc: assert '$OUT_SCALAR_1' in doc, "in {}".format(name) elif '\nx2 :' in doc or '\nx1, x2 :' in doc: assert '$OUT_SCALAR_2' in doc, "in {}".format(name) else: assert False, "Could not detect number of inputs in {}".format(name) for k, v in subst.items(): doc = doc.replace('$' + k, v) docdict['.'.join((place, name))] = doc add_newdoc('numpy.core.umath', 'absolute', """ Calculate the absolute value element-wise. ``np.abs`` is a shorthand for this function. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- absolute : ndarray An ndarray containing the absolute value of each element in `x`. For complex input, ``a + ib``, the absolute value is :math:`\\sqrt{ a^2 + b^2 }`. $OUT_SCALAR_1 Examples -------- >>> x = np.array([-1.2, 1.2]) >>> np.absolute(x) array([ 1.2, 1.2]) >>> np.absolute(1.2 + 1j) 1.5620499351813308 Plot the function over ``[-10, 10]``: >>> import matplotlib.pyplot as plt >>> x = np.linspace(start=-10, stop=10, num=101) >>> plt.plot(x, np.absolute(x)) >>> plt.show() Plot the function over the complex plane: >>> xx = x + 1j * x[:, np.newaxis] >>> plt.imshow(np.abs(xx), extent=[-10, 10, -10, 10], cmap='gray') >>> plt.show() The `abs` function can be used as a shorthand for ``np.absolute`` on ndarrays. >>> x = np.array([-1.2, 1.2]) >>> abs(x) array([1.2, 1.2]) """) add_newdoc('numpy.core.umath', 'add', """ Add arguments element-wise. Parameters ---------- x1, x2 : array_like The arrays to be added. $BROADCASTABLE_2 $PARAMS Returns ------- add : ndarray or scalar The sum of `x1` and `x2`, element-wise. $OUT_SCALAR_2 Notes ----- Equivalent to `x1` + `x2` in terms of array broadcasting. 
Examples -------- >>> np.add(1.0, 4.0) 5.0 >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> np.add(x1, x2) array([[ 0., 2., 4.], [ 3., 5., 7.], [ 6., 8., 10.]]) The ``+`` operator can be used as a shorthand for ``np.add`` on ndarrays. >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> x1 + x2 array([[ 0., 2., 4.], [ 3., 5., 7.], [ 6., 8., 10.]]) """) add_newdoc('numpy.core.umath', 'arccos', """ Trigonometric inverse cosine, element-wise. The inverse of `cos` so that, if ``y = cos(x)``, then ``x = arccos(y)``. Parameters ---------- x : array_like `x`-coordinate on the unit circle. For real arguments, the domain is [-1, 1]. $PARAMS Returns ------- angle : ndarray The angle of the ray intersecting the unit circle at the given `x`-coordinate in radians [0, pi]. $OUT_SCALAR_1 See Also -------- cos, arctan, arcsin, emath.arccos Notes ----- `arccos` is a multivalued function: for each `x` there are infinitely many numbers `z` such that ``cos(z) = x``. The convention is to return the angle `z` whose real part lies in `[0, pi]`. For real-valued input data types, `arccos` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arccos` is a complex analytic function that has branch cuts ``[-inf, -1]`` and `[1, inf]` and is continuous from above on the former and from below on the latter. The inverse `cos` is also known as `acos` or cos^-1. References ---------- M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 79. http://www.math.sfu.ca/~cbm/aands/ Examples -------- We expect the arccos of 1 to be 0, and of -1 to be pi: >>> np.arccos([1, -1]) array([ 0. , 3.14159265]) Plot arccos: >>> import matplotlib.pyplot as plt >>> x = np.linspace(-1, 1, num=100) >>> plt.plot(x, np.arccos(x)) >>> plt.axis('tight') >>> plt.show() """) add_newdoc('numpy.core.umath', 'arccosh', """ Inverse hyperbolic cosine, element-wise. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- arccosh : ndarray Array of the same shape as `x`. $OUT_SCALAR_1 See Also -------- cosh, arcsinh, sinh, arctanh, tanh Notes ----- `arccosh` is a multivalued function: for each `x` there are infinitely many numbers `z` such that `cosh(z) = x`. The convention is to return the `z` whose imaginary part lies in ``[-pi, pi]`` and the real part in ``[0, inf]``. For real-valued input data types, `arccosh` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arccosh` is a complex analytical function that has a branch cut `[-inf, 1]` and is continuous from above on it. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 86. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Inverse hyperbolic function", https://en.wikipedia.org/wiki/Arccosh Examples -------- >>> np.arccosh([np.e, 10.0]) array([ 1.65745445, 2.99322285]) >>> np.arccosh(1) 0.0 """) add_newdoc('numpy.core.umath', 'arcsin', """ Inverse sine, element-wise. Parameters ---------- x : array_like `y`-coordinate on the unit circle. $PARAMS Returns ------- angle : ndarray The inverse sine of each element in `x`, in radians and in the closed interval ``[-pi/2, pi/2]``. 
$OUT_SCALAR_1 See Also -------- sin, cos, arccos, tan, arctan, arctan2, emath.arcsin Notes ----- `arcsin` is a multivalued function: for each `x` there are infinitely many numbers `z` such that :math:`sin(z) = x`. The convention is to return the angle `z` whose real part lies in [-pi/2, pi/2]. For real-valued input data types, *arcsin* always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arcsin` is a complex analytic function that has, by convention, the branch cuts [-inf, -1] and [1, inf] and is continuous from above on the former and from below on the latter. The inverse sine is also known as `asin` or sin^{-1}. References ---------- Abramowitz, M. and Stegun, I. A., *Handbook of Mathematical Functions*, 10th printing, New York: Dover, 1964, pp. 79ff. http://www.math.sfu.ca/~cbm/aands/ Examples -------- >>> np.arcsin(1) # pi/2 1.5707963267948966 >>> np.arcsin(-1) # -pi/2 -1.5707963267948966 >>> np.arcsin(0) 0.0 """) add_newdoc('numpy.core.umath', 'arcsinh', """ Inverse hyperbolic sine element-wise. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- out : ndarray or scalar Array of the same shape as `x`. $OUT_SCALAR_1 Notes ----- `arcsinh` is a multivalued function: for each `x` there are infinitely many numbers `z` such that `sinh(z) = x`. The convention is to return the `z` whose imaginary part lies in `[-pi/2, pi/2]`. For real-valued input data types, `arcsinh` always returns real output. For each value that cannot be expressed as a real number or infinity, it returns ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arccos` is a complex analytical function that has branch cuts `[1j, infj]` and `[-1j, -infj]` and is continuous from the right on the former and from the left on the latter. The inverse hyperbolic sine is also known as `asinh` or ``sinh^-1``. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 86. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Inverse hyperbolic function", https://en.wikipedia.org/wiki/Arcsinh Examples -------- >>> np.arcsinh(np.array([np.e, 10.0])) array([ 1.72538256, 2.99822295]) """) add_newdoc('numpy.core.umath', 'arctan', """ Trigonometric inverse tangent, element-wise. The inverse of tan, so that if ``y = tan(x)`` then ``x = arctan(y)``. Parameters ---------- x : array_like $PARAMS Returns ------- out : ndarray or scalar Out has the same shape as `x`. Its real part is in ``[-pi/2, pi/2]`` (``arctan(+/-inf)`` returns ``+/-pi/2``). $OUT_SCALAR_1 See Also -------- arctan2 : The "four quadrant" arctan of the angle formed by (`x`, `y`) and the positive `x`-axis. angle : Argument of complex values. Notes ----- `arctan` is a multi-valued function: for each `x` there are infinitely many numbers `z` such that tan(`z`) = `x`. The convention is to return the angle `z` whose real part lies in [-pi/2, pi/2]. For real-valued input data types, `arctan` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arctan` is a complex analytic function that has [``1j, infj``] and [``-1j, -infj``] as branch cuts, and is continuous from the left on the former and from the right on the latter. The inverse tangent is also known as `atan` or tan^{-1}. 
References ---------- Abramowitz, M. and Stegun, I. A., *Handbook of Mathematical Functions*, 10th printing, New York: Dover, 1964, pp. 79. http://www.math.sfu.ca/~cbm/aands/ Examples -------- We expect the arctan of 0 to be 0, and of 1 to be pi/4: >>> np.arctan([0, 1]) array([ 0. , 0.78539816]) >>> np.pi/4 0.78539816339744828 Plot arctan: >>> import matplotlib.pyplot as plt >>> x = np.linspace(-10, 10) >>> plt.plot(x, np.arctan(x)) >>> plt.axis('tight') >>> plt.show() """) add_newdoc('numpy.core.umath', 'arctan2', """ Element-wise arc tangent of ``x1/x2`` choosing the quadrant correctly. The quadrant (i.e., branch) is chosen so that ``arctan2(x1, x2)`` is the signed angle in radians between the ray ending at the origin and passing through the point (1,0), and the ray ending at the origin and passing through the point (`x2`, `x1`). (Note the role reversal: the "`y`-coordinate" is the first function parameter, the "`x`-coordinate" is the second.) By IEEE convention, this function is defined for `x2` = +/-0 and for either or both of `x1` and `x2` = +/-inf (see Notes for specific values). This function is not defined for complex-valued arguments; for the so-called argument of complex values, use `angle`. Parameters ---------- x1 : array_like, real-valued `y`-coordinates. x2 : array_like, real-valued `x`-coordinates. $BROADCASTABLE_2 $PARAMS Returns ------- angle : ndarray Array of angles in radians, in the range ``[-pi, pi]``. $OUT_SCALAR_2 See Also -------- arctan, tan, angle Notes ----- *arctan2* is identical to the `atan2` function of the underlying C library. The following special values are defined in the C standard: [1]_ ====== ====== ================ `x1` `x2` `arctan2(x1,x2)` ====== ====== ================ +/- 0 +0 +/- 0 +/- 0 -0 +/- pi > 0 +/-inf +0 / +pi < 0 +/-inf -0 / -pi +/-inf +inf +/- (pi/4) +/-inf -inf +/- (3*pi/4) ====== ====== ================ Note that +0 and -0 are distinct floating point numbers, as are +inf and -inf. References ---------- .. [1] ISO/IEC standard 9899:1999, "Programming language C." Examples -------- Consider four points in different quadrants: >>> x = np.array([-1, +1, +1, -1]) >>> y = np.array([-1, -1, +1, +1]) >>> np.arctan2(y, x) * 180 / np.pi array([-135., -45., 45., 135.]) Note the order of the parameters. `arctan2` is defined also when `x2` = 0 and at several other special points, obtaining values in the range ``[-pi, pi]``: >>> np.arctan2([1., -1.], [0., 0.]) array([ 1.57079633, -1.57079633]) >>> np.arctan2([0., 0., np.inf], [+0., -0., np.inf]) array([ 0. , 3.14159265, 0.78539816]) """) add_newdoc('numpy.core.umath', '_arg', """ DO NOT USE, ONLY FOR TESTING """) add_newdoc('numpy.core.umath', 'arctanh', """ Inverse hyperbolic tangent element-wise. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- out : ndarray or scalar Array of the same shape as `x`. $OUT_SCALAR_1 See Also -------- emath.arctanh Notes ----- `arctanh` is a multivalued function: for each `x` there are infinitely many numbers `z` such that ``tanh(z) = x``. The convention is to return the `z` whose imaginary part lies in `[-pi/2, pi/2]`. For real-valued input data types, `arctanh` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `arctanh` is a complex analytical function that has branch cuts `[-1, -inf]` and `[1, inf]` and is continuous from above on the former and from below on the latter. 
The inverse hyperbolic tangent is also known as `atanh` or ``tanh^-1``. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 86. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Inverse hyperbolic function", https://en.wikipedia.org/wiki/Arctanh Examples -------- >>> np.arctanh([0, -0.5]) array([ 0. , -0.54930614]) """) add_newdoc('numpy.core.umath', 'bitwise_and', """ Compute the bit-wise AND of two arrays element-wise. Computes the bit-wise AND of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``&``. Parameters ---------- x1, x2 : array_like Only integer and boolean types are handled. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Result. $OUT_SCALAR_2 See Also -------- logical_and bitwise_or bitwise_xor binary_repr : Return the binary representation of the input number as a string. Examples -------- The number 13 is represented by ``00001101``. Likewise, 17 is represented by ``00010001``. The bit-wise AND of 13 and 17 is therefore ``000000001``, or 1: >>> np.bitwise_and(13, 17) 1 >>> np.bitwise_and(14, 13) 12 >>> np.binary_repr(12) '1100' >>> np.bitwise_and([14,3], 13) array([12, 1]) >>> np.bitwise_and([11,7], [4,25]) array([0, 1]) >>> np.bitwise_and(np.array([2,5,255]), np.array([3,14,16])) array([ 2, 4, 16]) >>> np.bitwise_and([True, True], [False, True]) array([False, True]) The ``&`` operator can be used as a shorthand for ``np.bitwise_and`` on ndarrays. >>> x1 = np.array([2, 5, 255]) >>> x2 = np.array([3, 14, 16]) >>> x1 & x2 array([ 2, 4, 16]) """) add_newdoc('numpy.core.umath', 'bitwise_or', """ Compute the bit-wise OR of two arrays element-wise. Computes the bit-wise OR of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``|``. Parameters ---------- x1, x2 : array_like Only integer and boolean types are handled. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Result. $OUT_SCALAR_2 See Also -------- logical_or bitwise_and bitwise_xor binary_repr : Return the binary representation of the input number as a string. Examples -------- The number 13 has the binaray representation ``00001101``. Likewise, 16 is represented by ``00010000``. The bit-wise OR of 13 and 16 is then ``000111011``, or 29: >>> np.bitwise_or(13, 16) 29 >>> np.binary_repr(29) '11101' >>> np.bitwise_or(32, 2) 34 >>> np.bitwise_or([33, 4], 1) array([33, 5]) >>> np.bitwise_or([33, 4], [1, 2]) array([33, 6]) >>> np.bitwise_or(np.array([2, 5, 255]), np.array([4, 4, 4])) array([ 6, 5, 255]) >>> np.array([2, 5, 255]) | np.array([4, 4, 4]) array([ 6, 5, 255]) >>> np.bitwise_or(np.array([2, 5, 255, 2147483647], dtype=np.int32), ... np.array([4, 4, 4, 2147483647], dtype=np.int32)) array([ 6, 5, 255, 2147483647]) >>> np.bitwise_or([True, True], [False, True]) array([ True, True]) The ``|`` operator can be used as a shorthand for ``np.bitwise_or`` on ndarrays. >>> x1 = np.array([2, 5, 255]) >>> x2 = np.array([4, 4, 4]) >>> x1 | x2 array([ 6, 5, 255]) """) add_newdoc('numpy.core.umath', 'bitwise_xor', """ Compute the bit-wise XOR of two arrays element-wise. Computes the bit-wise XOR of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``^``. Parameters ---------- x1, x2 : array_like Only integer and boolean types are handled. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Result. 
$OUT_SCALAR_2 See Also -------- logical_xor bitwise_and bitwise_or binary_repr : Return the binary representation of the input number as a string. Examples -------- The number 13 is represented by ``00001101``. Likewise, 17 is represented by ``00010001``. The bit-wise XOR of 13 and 17 is therefore ``00011100``, or 28: >>> np.bitwise_xor(13, 17) 28 >>> np.binary_repr(28) '11100' >>> np.bitwise_xor(31, 5) 26 >>> np.bitwise_xor([31,3], 5) array([26, 6]) >>> np.bitwise_xor([31,3], [5,6]) array([26, 5]) >>> np.bitwise_xor([True, True], [False, True]) array([ True, False]) The ``^`` operator can be used as a shorthand for ``np.bitwise_xor`` on ndarrays. >>> x1 = np.array([True, True]) >>> x2 = np.array([False, True]) >>> x1 ^ x2 array([ True, False]) """) add_newdoc('numpy.core.umath', 'ceil', """ Return the ceiling of the input, element-wise. The ceil of the scalar `x` is the smallest integer `i`, such that ``i >= x``. It is often denoted as :math:`\\lceil x \\rceil`. Parameters ---------- x : array_like Input data. $PARAMS Returns ------- y : ndarray or scalar The ceiling of each element in `x`, with `float` dtype. $OUT_SCALAR_1 See Also -------- floor, trunc, rint, fix Examples -------- >>> a = np.array([-1.7, -1.5, -0.2, 0.2, 1.5, 1.7, 2.0]) >>> np.ceil(a) array([-1., -1., -0., 1., 2., 2., 2.]) """) add_newdoc('numpy.core.umath', 'trunc', """ Return the truncated value of the input, element-wise. The truncated value of the scalar `x` is the nearest integer `i` which is closer to zero than `x` is. In short, the fractional part of the signed number `x` is discarded. Parameters ---------- x : array_like Input data. $PARAMS Returns ------- y : ndarray or scalar The truncated value of each element in `x`. $OUT_SCALAR_1 See Also -------- ceil, floor, rint, fix Notes ----- .. versionadded:: 1.3.0 Examples -------- >>> a = np.array([-1.7, -1.5, -0.2, 0.2, 1.5, 1.7, 2.0]) >>> np.trunc(a) array([-1., -1., -0., 0., 1., 1., 2.]) """) add_newdoc('numpy.core.umath', 'conjugate', """ Return the complex conjugate, element-wise. The complex conjugate of a complex number is obtained by changing the sign of its imaginary part. Parameters ---------- x : array_like Input value. $PARAMS Returns ------- y : ndarray The complex conjugate of `x`, with same dtype as `y`. $OUT_SCALAR_1 Notes ----- `conj` is an alias for `conjugate`: >>> np.conj is np.conjugate True Examples -------- >>> np.conjugate(1+2j) (1-2j) >>> x = np.eye(2) + 1j * np.eye(2) >>> np.conjugate(x) array([[ 1.-1.j, 0.-0.j], [ 0.-0.j, 1.-1.j]]) """) add_newdoc('numpy.core.umath', 'cos', """ Cosine element-wise. Parameters ---------- x : array_like Input array in radians. $PARAMS Returns ------- y : ndarray The corresponding cosine values. $OUT_SCALAR_1 Notes ----- If `out` is provided, the function writes the result into it, and returns a reference to `out`. (See Examples) References ---------- M. Abramowitz and I. A. Stegun, Handbook of Mathematical Functions. New York, NY: Dover, 1972. 
Examples -------- >>> np.cos(np.array([0, np.pi/2, np.pi])) array([ 1.00000000e+00, 6.12303177e-17, -1.00000000e+00]) >>> >>> # Example of providing the optional output parameter >>> out1 = np.array([0], dtype='d') >>> out2 = np.cos([0.1], out1) >>> out2 is out1 True >>> >>> # Example of ValueError due to provision of shape mis-matched `out` >>> np.cos(np.zeros((3,3)),np.zeros((2,2))) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: operands could not be broadcast together with shapes (3,3) (2,2) """) add_newdoc('numpy.core.umath', 'cosh', """ Hyperbolic cosine, element-wise. Equivalent to ``1/2 * (np.exp(x) + np.exp(-x))`` and ``np.cos(1j*x)``. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- out : ndarray or scalar Output array of same shape as `x`. $OUT_SCALAR_1 Examples -------- >>> np.cosh(0) 1.0 The hyperbolic cosine describes the shape of a hanging cable: >>> import matplotlib.pyplot as plt >>> x = np.linspace(-4, 4, 1000) >>> plt.plot(x, np.cosh(x)) >>> plt.show() """) add_newdoc('numpy.core.umath', 'degrees', """ Convert angles from radians to degrees. Parameters ---------- x : array_like Input array in radians. $PARAMS Returns ------- y : ndarray of floats The corresponding degree values; if `out` was supplied this is a reference to it. $OUT_SCALAR_1 See Also -------- rad2deg : equivalent function Examples -------- Convert a radian array to degrees >>> rad = np.arange(12.)*np.pi/6 >>> np.degrees(rad) array([ 0., 30., 60., 90., 120., 150., 180., 210., 240., 270., 300., 330.]) >>> out = np.zeros((rad.shape)) >>> r = np.degrees(rad, out) >>> np.all(r == out) True """) add_newdoc('numpy.core.umath', 'rad2deg', """ Convert angles from radians to degrees. Parameters ---------- x : array_like Angle in radians. $PARAMS Returns ------- y : ndarray The corresponding angle in degrees. $OUT_SCALAR_1 See Also -------- deg2rad : Convert angles from degrees to radians. unwrap : Remove large jumps in angle by wrapping. Notes ----- .. versionadded:: 1.3.0 rad2deg(x) is ``180 * x / pi``. Examples -------- >>> np.rad2deg(np.pi/2) 90.0 """) add_newdoc('numpy.core.umath', 'heaviside', """ Compute the Heaviside step function. The Heaviside step function is defined as:: 0 if x1 < 0 heaviside(x1, x2) = x2 if x1 == 0 1 if x1 > 0 where `x2` is often taken to be 0.5, but 0 and 1 are also sometimes used. Parameters ---------- x1 : array_like Input values. x2 : array_like The value of the function when x1 is 0. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar The output array, element-wise Heaviside step function of `x1`. $OUT_SCALAR_2 Notes ----- .. versionadded:: 1.13.0 References ---------- .. Wikipedia, "Heaviside step function", https://en.wikipedia.org/wiki/Heaviside_step_function Examples -------- >>> np.heaviside([-1.5, 0, 2.0], 0.5) array([ 0. , 0.5, 1. ]) >>> np.heaviside([-1.5, 0, 2.0], 1) array([ 0., 1., 1.]) """) add_newdoc('numpy.core.umath', 'divide', """ Divide arguments element-wise. Parameters ---------- x1 : array_like Dividend array. x2 : array_like Divisor array. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The quotient ``x1/x2``, element-wise. $OUT_SCALAR_2 See Also -------- seterr : Set whether to raise or warn on overflow, underflow and division by zero. Notes ----- Equivalent to ``x1`` / ``x2`` in terms of array-broadcasting. Behavior on division by zero can be changed using ``seterr``. 
In Python 2, when both ``x1`` and ``x2`` are of an integer type, ``divide`` will behave like ``floor_divide``. In Python 3, it behaves like ``true_divide``. Examples -------- >>> np.divide(2.0, 4.0) 0.5 >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> np.divide(x1, x2) array([[ NaN, 1. , 1. ], [ Inf, 4. , 2.5], [ Inf, 7. , 4. ]]) Note the behavior with integer types (Python 2 only): >>> np.divide(2, 4) 0 >>> np.divide(2, 4.) 0.5 Division by zero always yields zero in integer arithmetic (again, Python 2 only), and does not raise an exception or a warning: >>> np.divide(np.array([0, 1], dtype=int), np.array([0, 0], dtype=int)) array([0, 0]) Division by zero can, however, be caught using ``seterr``: >>> old_err_state = np.seterr(divide='raise') >>> np.divide(1, 0) Traceback (most recent call last): File "<stdin>", line 1, in <module> FloatingPointError: divide by zero encountered in divide >>> ignored_states = np.seterr(**old_err_state) >>> np.divide(1, 0) 0 The ``/`` operator can be used as a shorthand for ``np.divide`` on ndarrays. >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = 2 * np.ones(3) >>> x1 / x2 array([[0. , 0.5, 1. ], [1.5, 2. , 2.5], [3. , 3.5, 4. ]]) """) add_newdoc('numpy.core.umath', 'equal', """ Return (x1 == x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- not_equal, greater_equal, less_equal, greater, less Examples -------- >>> np.equal([0, 1, 3], np.arange(3)) array([ True, True, False]) What is compared are values, not types. So an int (1) and an array of length one can evaluate as True: >>> np.equal(1, np.ones(1)) array([ True]) The ``==`` operator can be used as a shorthand for ``np.equal`` on ndarrays. >>> a = np.array([2, 4, 6]) >>> b = np.array([2, 4, 2]) >>> a == b array([ True, True, False]) """) add_newdoc('numpy.core.umath', 'exp', """ Calculate the exponential of all elements in the input array. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise exponential of `x`. $OUT_SCALAR_1 See Also -------- expm1 : Calculate ``exp(x) - 1`` for all elements in the array. exp2 : Calculate ``2**x`` for all elements in the array. Notes ----- The irrational number ``e`` is also known as Euler's number. It is approximately 2.718281, and is the base of the natural logarithm, ``ln`` (this means that, if :math:`x = \\ln y = \\log_e y`, then :math:`e^x = y`). For real input, ``exp(x)`` is always positive. For complex arguments, ``x = a + ib``, we can write :math:`e^x = e^a e^{ib}`. The first term, :math:`e^a`, is already known (it is the real argument, described above). The second term, :math:`e^{ib}`, is :math:`\\cos b + i \\sin b`, a function with magnitude 1 and a periodic phase. References ---------- .. [1] Wikipedia, "Exponential function", https://en.wikipedia.org/wiki/Exponential_function .. [2] M. Abramowitz and I. A. Stegun, "Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables," Dover, 1964, p.
69, http://www.math.sfu.ca/~cbm/aands/page_69.htm Examples -------- Plot the magnitude and phase of ``exp(x)`` in the complex plane: >>> import matplotlib.pyplot as plt >>> x = np.linspace(-2*np.pi, 2*np.pi, 100) >>> xx = x + 1j * x[:, np.newaxis] # a + ib over complex plane >>> out = np.exp(xx) >>> plt.subplot(121) >>> plt.imshow(np.abs(out), ... extent=[-2*np.pi, 2*np.pi, -2*np.pi, 2*np.pi], cmap='gray') >>> plt.title('Magnitude of exp(x)') >>> plt.subplot(122) >>> plt.imshow(np.angle(out), ... extent=[-2*np.pi, 2*np.pi, -2*np.pi, 2*np.pi], cmap='hsv') >>> plt.title('Phase (angle) of exp(x)') >>> plt.show() """) add_newdoc('numpy.core.umath', 'exp2', """ Calculate `2**p` for all `p` in the input array. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- out : ndarray or scalar Element-wise 2 to the power `x`. $OUT_SCALAR_1 See Also -------- power Notes ----- .. versionadded:: 1.3.0 Examples -------- >>> np.exp2([2, 3]) array([ 4., 8.]) """) add_newdoc('numpy.core.umath', 'expm1', """ Calculate ``exp(x) - 1`` for all elements in the array. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- out : ndarray or scalar Element-wise exponential minus one: ``out = exp(x) - 1``. $OUT_SCALAR_1 See Also -------- log1p : ``log(1 + x)``, the inverse of expm1. Notes ----- This function provides greater precision than ``exp(x) - 1`` for small values of ``x``. Examples -------- The true value of ``exp(1e-10) - 1`` is ``1.00000000005e-10`` to about 32 significant digits. This example shows the superiority of expm1 in this case. >>> np.expm1(1e-10) 1.00000000005e-10 >>> np.exp(1e-10) - 1 1.000000082740371e-10 """) add_newdoc('numpy.core.umath', 'fabs', """ Compute the absolute values element-wise. This function returns the absolute values (positive magnitude) of the data in `x`. Complex values are not handled, use `absolute` to find the absolute values of complex data. Parameters ---------- x : array_like The array of numbers for which the absolute values are required. If `x` is a scalar, the result `y` will also be a scalar. $PARAMS Returns ------- y : ndarray or scalar The absolute values of `x`, the returned values are always floats. $OUT_SCALAR_1 See Also -------- absolute : Absolute values including `complex` types. Examples -------- >>> np.fabs(-1) 1.0 >>> np.fabs([-1.2, 1.2]) array([ 1.2, 1.2]) """) add_newdoc('numpy.core.umath', 'floor', """ Return the floor of the input, element-wise. The floor of the scalar `x` is the largest integer `i`, such that `i <= x`. It is often denoted as :math:`\\lfloor x \\rfloor`. Parameters ---------- x : array_like Input data. $PARAMS Returns ------- y : ndarray or scalar The floor of each element in `x`. $OUT_SCALAR_1 See Also -------- ceil, trunc, rint, fix Notes ----- Some spreadsheet programs calculate the "floor-towards-zero", where ``floor(-2.5) == -2``. NumPy instead uses the definition of `floor` where `floor(-2.5) == -3`. The "floor-towards-zero" function is called ``fix`` in NumPy. Examples -------- >>> a = np.array([-1.7, -1.5, -0.2, 0.2, 1.5, 1.7, 2.0]) >>> np.floor(a) array([-2., -2., -1., 0., 1., 1., 2.]) """) add_newdoc('numpy.core.umath', 'floor_divide', """ Return the largest integer smaller or equal to the division of the inputs. It is equivalent to the Python ``//`` operator and pairs with the Python ``%`` (`remainder`), function so that ``a = a % b + b * (a // b)`` up to roundoff. Parameters ---------- x1 : array_like Numerator. x2 : array_like Denominator. 
$BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray y = floor(`x1`/`x2`) $OUT_SCALAR_2 See Also -------- remainder : Remainder complementary to floor_divide. divmod : Simultaneous floor division and remainder. divide : Standard division. floor : Round a number to the nearest integer toward minus infinity. ceil : Round a number to the nearest integer toward infinity. Examples -------- >>> np.floor_divide(7,3) 2 >>> np.floor_divide([1., 2., 3., 4.], 2.5) array([ 0., 0., 1., 1.]) The ``//`` operator can be used as a shorthand for ``np.floor_divide`` on ndarrays. >>> x1 = np.array([1., 2., 3., 4.]) >>> x1 // 2.5 array([0., 0., 1., 1.]) """) add_newdoc('numpy.core.umath', 'fmod', """ Return the element-wise remainder of division. This is the NumPy implementation of the C library function fmod, the remainder has the same sign as the dividend `x1`. It is equivalent to the Matlab(TM) ``rem`` function and should not be confused with the Python modulus operator ``x1 % x2``. Parameters ---------- x1 : array_like Dividend. x2 : array_like Divisor. $BROADCASTABLE_2 $PARAMS Returns ------- y : array_like The remainder of the division of `x1` by `x2`. $OUT_SCALAR_2 See Also -------- remainder : Equivalent to the Python ``%`` operator. divide Notes ----- The result of the modulo operation for negative dividend and divisors is bound by conventions. For `fmod`, the sign of result is the sign of the dividend, while for `remainder` the sign of the result is the sign of the divisor. The `fmod` function is equivalent to the Matlab(TM) ``rem`` function. Examples -------- >>> np.fmod([-3, -2, -1, 1, 2, 3], 2) array([-1, 0, -1, 1, 0, 1]) >>> np.remainder([-3, -2, -1, 1, 2, 3], 2) array([1, 0, 1, 1, 0, 1]) >>> np.fmod([5, 3], [2, 2.]) array([ 1., 1.]) >>> a = np.arange(-3, 3).reshape(3, 2) >>> a array([[-3, -2], [-1, 0], [ 1, 2]]) >>> np.fmod(a, [2,2]) array([[-1, 0], [-1, 0], [ 1, 0]]) """) add_newdoc('numpy.core.umath', 'greater', """ Return the truth value of (x1 > x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- greater_equal, less, less_equal, equal, not_equal Examples -------- >>> np.greater([4,2],[2,2]) array([ True, False]) The ``>`` operator can be used as a shorthand for ``np.greater`` on ndarrays. >>> a = np.array([4, 2]) >>> b = np.array([2, 2]) >>> a > b array([ True, False]) """) add_newdoc('numpy.core.umath', 'greater_equal', """ Return the truth value of (x1 >= x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : bool or ndarray of bool Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- greater, less, less_equal, equal, not_equal Examples -------- >>> np.greater_equal([4, 2, 1], [2, 2, 2]) array([ True, True, False]) The ``>=`` operator can be used as a shorthand for ``np.greater_equal`` on ndarrays. >>> a = np.array([4, 2, 1]) >>> b = np.array([2, 2, 2]) >>> a >= b array([ True, True, False]) """) add_newdoc('numpy.core.umath', 'hypot', """ Given the "legs" of a right triangle, return its hypotenuse. Equivalent to ``sqrt(x1**2 + x2**2)``, element-wise. If `x1` or `x2` is scalar_like (i.e., unambiguously cast-able to a scalar type), it is broadcast for use with each element of the other argument. 
(See Examples) Parameters ---------- x1, x2 : array_like Leg of the triangle(s). $BROADCASTABLE_2 $PARAMS Returns ------- z : ndarray The hypotenuse of the triangle(s). $OUT_SCALAR_2 Examples -------- >>> np.hypot(3*np.ones((3, 3)), 4*np.ones((3, 3))) array([[ 5., 5., 5.], [ 5., 5., 5.], [ 5., 5., 5.]]) Example showing broadcast of scalar_like argument: >>> np.hypot(3*np.ones((3, 3)), [4]) array([[ 5., 5., 5.], [ 5., 5., 5.], [ 5., 5., 5.]]) """) add_newdoc('numpy.core.umath', 'invert', """ Compute bit-wise inversion, or bit-wise NOT, element-wise. Computes the bit-wise NOT of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``~``. For signed integer inputs, the two's complement is returned. In a two's-complement system negative numbers are represented by the two's complement of the absolute value. This is the most common method of representing signed integers on computers [1]_. A N-bit two's-complement system can represent every integer in the range :math:`-2^{N-1}` to :math:`+2^{N-1}-1`. Parameters ---------- x : array_like Only integer and boolean types are handled. $PARAMS Returns ------- out : ndarray or scalar Result. $OUT_SCALAR_1 See Also -------- bitwise_and, bitwise_or, bitwise_xor logical_not binary_repr : Return the binary representation of the input number as a string. Notes ----- `bitwise_not` is an alias for `invert`: >>> np.bitwise_not is np.invert True References ---------- .. [1] Wikipedia, "Two's complement", https://en.wikipedia.org/wiki/Two's_complement Examples -------- We've seen that 13 is represented by ``00001101``. The invert or bit-wise NOT of 13 is then: >>> x = np.invert(np.array(13, dtype=np.uint8)) >>> x 242 >>> np.binary_repr(x, width=8) '11110010' The result depends on the bit-width: >>> x = np.invert(np.array(13, dtype=np.uint16)) >>> x 65522 >>> np.binary_repr(x, width=16) '1111111111110010' When using signed integer types the result is the two's complement of the result for the unsigned type: >>> np.invert(np.array([13], dtype=np.int8)) array([-14], dtype=int8) >>> np.binary_repr(-14, width=8) '11110010' Booleans are accepted as well: >>> np.invert(np.array([True, False])) array([False, True]) The ``~`` operator can be used as a shorthand for ``np.invert`` on ndarrays. >>> x1 = np.array([True, False]) >>> ~x1 array([False, True]) """) add_newdoc('numpy.core.umath', 'isfinite', """ Test element-wise for finiteness (not infinity or not Not a Number). The result is returned as a boolean array. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- y : ndarray, bool True where ``x`` is not positive infinity, negative infinity, or NaN; false otherwise. $OUT_SCALAR_1 See Also -------- isinf, isneginf, isposinf, isnan Notes ----- Not a Number, positive infinity and negative infinity are considered to be non-finite. NumPy uses the IEEE Standard for Binary Floating-Point for Arithmetic (IEEE 754). This means that Not a Number is not equivalent to infinity. Also that positive infinity is not equivalent to negative infinity. But infinity is equivalent to positive infinity. Errors result if the second argument is also supplied when `x` is a scalar input, or if first and second arguments have different shapes. 
Examples -------- >>> np.isfinite(1) True >>> np.isfinite(0) True >>> np.isfinite(np.nan) False >>> np.isfinite(np.inf) False >>> np.isfinite(np.NINF) False >>> np.isfinite([np.log(-1.),1.,np.log(0)]) array([False, True, False]) >>> x = np.array([-np.inf, 0., np.inf]) >>> y = np.array([2, 2, 2]) >>> np.isfinite(x, y) array([0, 1, 0]) >>> y array([0, 1, 0]) """) add_newdoc('numpy.core.umath', 'isinf', """ Test element-wise for positive or negative infinity. Returns a boolean array of the same shape as `x`, True where ``x == +/-inf``, otherwise False. Parameters ---------- x : array_like Input values $PARAMS Returns ------- y : bool (scalar) or boolean ndarray True where ``x`` is positive or negative infinity, false otherwise. $OUT_SCALAR_1 See Also -------- isneginf, isposinf, isnan, isfinite Notes ----- NumPy uses the IEEE Standard for Binary Floating-Point for Arithmetic (IEEE 754). Errors result if the second argument is supplied when the first argument is a scalar, or if the first and second arguments have different shapes. Examples -------- >>> np.isinf(np.inf) True >>> np.isinf(np.nan) False >>> np.isinf(np.NINF) True >>> np.isinf([np.inf, -np.inf, 1.0, np.nan]) array([ True, True, False, False]) >>> x = np.array([-np.inf, 0., np.inf]) >>> y = np.array([2, 2, 2]) >>> np.isinf(x, y) array([1, 0, 1]) >>> y array([1, 0, 1]) """) add_newdoc('numpy.core.umath', 'isnan', """ Test element-wise for NaN and return result as a boolean array. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y : ndarray or bool True where ``x`` is NaN, false otherwise. $OUT_SCALAR_1 See Also -------- isinf, isneginf, isposinf, isfinite, isnat Notes ----- NumPy uses the IEEE Standard for Binary Floating-Point for Arithmetic (IEEE 754). This means that Not a Number is not equivalent to infinity. Examples -------- >>> np.isnan(np.nan) True >>> np.isnan(np.inf) False >>> np.isnan([np.log(-1.),1.,np.log(0)]) array([ True, False, False]) """) add_newdoc('numpy.core.umath', 'isnat', """ Test element-wise for NaT (not a time) and return result as a boolean array. .. versionadded:: 1.13.0 Parameters ---------- x : array_like Input array with datetime or timedelta data type. $PARAMS Returns ------- y : ndarray or bool True where ``x`` is NaT, false otherwise. $OUT_SCALAR_1 See Also -------- isnan, isinf, isneginf, isposinf, isfinite Examples -------- >>> np.isnat(np.datetime64("NaT")) True >>> np.isnat(np.datetime64("2016-01-01")) False >>> np.isnat(np.array(["NaT", "2016-01-01"], dtype="datetime64[ns]")) array([ True, False]) """) add_newdoc('numpy.core.umath', 'left_shift', """ Shift the bits of an integer to the left. Bits are shifted to the left by appending `x2` 0s at the right of `x1`. Since the internal representation of numbers is in binary format, this operation is equivalent to multiplying `x1` by ``2**x2``. Parameters ---------- x1 : array_like of integer type Input values. x2 : array_like of integer type Number of zeros to append to `x1`. Has to be non-negative. $BROADCASTABLE_2 $PARAMS Returns ------- out : array of integer type Return `x1` with bits shifted `x2` times to the left. $OUT_SCALAR_2 See Also -------- right_shift : Shift the bits of an integer to the right. binary_repr : Return the binary representation of the input number as a string. 
Examples -------- >>> np.binary_repr(5) '101' >>> np.left_shift(5, 2) 20 >>> np.binary_repr(20) '10100' >>> np.left_shift(5, [1,2,3]) array([10, 20, 40]) Note that the dtype of the second argument may change the dtype of the result and can lead to unexpected results in some cases (see :ref:`Casting Rules <ufuncs.casting>`): >>> a = np.left_shift(np.uint8(255), 1) # Expect 254 >>> print(a, type(a)) # Unexpected result due to upcasting 510 <class 'numpy.int64'> >>> b = np.left_shift(np.uint8(255), np.uint8(1)) >>> print(b, type(b)) 254 <class 'numpy.uint8'> The ``<<`` operator can be used as a shorthand for ``np.left_shift`` on ndarrays. >>> x1 = 5 >>> x2 = np.array([1, 2, 3]) >>> x1 << x2 array([10, 20, 40]) """) add_newdoc('numpy.core.umath', 'less', """ Return the truth value of (x1 < x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- greater, less_equal, greater_equal, equal, not_equal Examples -------- >>> np.less([1, 2], [2, 2]) array([ True, False]) The ``<`` operator can be used as a shorthand for ``np.less`` on ndarrays. >>> a = np.array([1, 2]) >>> b = np.array([2, 2]) >>> a < b array([ True, False]) """) add_newdoc('numpy.core.umath', 'less_equal', """ Return the truth value of (x1 <= x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- greater, less, greater_equal, equal, not_equal Examples -------- >>> np.less_equal([4, 2, 1], [2, 2, 2]) array([False, True, True]) The ``<=`` operator can be used as a shorthand for ``np.less_equal`` on ndarrays. >>> a = np.array([4, 2, 1]) >>> b = np.array([2, 2, 2]) >>> a <= b array([False, True, True]) """) add_newdoc('numpy.core.umath', 'log', """ Natural logarithm, element-wise. The natural logarithm `log` is the inverse of the exponential function, so that `log(exp(x)) = x`. The natural logarithm is logarithm in base `e`. Parameters ---------- x : array_like Input value. $PARAMS Returns ------- y : ndarray The natural logarithm of `x`, element-wise. $OUT_SCALAR_1 See Also -------- log10, log2, log1p, emath.log Notes ----- Logarithm is a multivalued function: for each `x` there is an infinite number of `z` such that `exp(z) = x`. The convention is to return the `z` whose imaginary part lies in `[-pi, pi]`. For real-valued input data types, `log` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `log` is a complex analytical function that has a branch cut `[-inf, 0]` and is continuous from above on it. `log` handles the floating-point negative zero as an infinitesimal negative number, conforming to the C99 standard. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 67. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Logarithm". https://en.wikipedia.org/wiki/Logarithm Examples -------- >>> np.log([1, np.e, np.e**2, 0]) array([ 0., 1., 2., -Inf]) """) add_newdoc('numpy.core.umath', 'log10', """ Return the base 10 logarithm of the input array, element-wise. 
Parameters ---------- x : array_like Input values. $PARAMS Returns ------- y : ndarray The logarithm to the base 10 of `x`, element-wise. NaNs are returned where x is negative. $OUT_SCALAR_1 See Also -------- emath.log10 Notes ----- Logarithm is a multivalued function: for each `x` there is an infinite number of `z` such that `10**z = x`. The convention is to return the `z` whose imaginary part lies in `[-pi, pi]`. For real-valued input data types, `log10` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `log10` is a complex analytical function that has a branch cut `[-inf, 0]` and is continuous from above on it. `log10` handles the floating-point negative zero as an infinitesimal negative number, conforming to the C99 standard. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 67. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Logarithm". https://en.wikipedia.org/wiki/Logarithm Examples -------- >>> np.log10([1e-15, -3.]) array([-15., nan]) """) add_newdoc('numpy.core.umath', 'log2', """ Base-2 logarithm of `x`. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- y : ndarray Base-2 logarithm of `x`. $OUT_SCALAR_1 See Also -------- log, log10, log1p, emath.log2 Notes ----- .. versionadded:: 1.3.0 Logarithm is a multivalued function: for each `x` there is an infinite number of `z` such that `2**z = x`. The convention is to return the `z` whose imaginary part lies in `[-pi, pi]`. For real-valued input data types, `log2` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `log2` is a complex analytical function that has a branch cut `[-inf, 0]` and is continuous from above on it. `log2` handles the floating-point negative zero as an infinitesimal negative number, conforming to the C99 standard. Examples -------- >>> x = np.array([0, 1, 2, 2**4]) >>> np.log2(x) array([-Inf, 0., 1., 4.]) >>> xi = np.array([0+1.j, 1, 2+0.j, 4.j]) >>> np.log2(xi) array([ 0.+2.26618007j, 0.+0.j , 1.+0.j , 2.+2.26618007j]) """) add_newdoc('numpy.core.umath', 'logaddexp', """ Logarithm of the sum of exponentiations of the inputs. Calculates ``log(exp(x1) + exp(x2))``. This function is useful in statistics where the calculated probabilities of events may be so small as to exceed the range of normal floating point numbers. In such cases the logarithm of the calculated probability is stored. This function allows adding probabilities stored in such a fashion. Parameters ---------- x1, x2 : array_like Input values. $BROADCASTABLE_2 $PARAMS Returns ------- result : ndarray Logarithm of ``exp(x1) + exp(x2)``. $OUT_SCALAR_2 See Also -------- logaddexp2: Logarithm of the sum of exponentiations of inputs in base 2. Notes ----- .. versionadded:: 1.3.0 Examples -------- >>> prob1 = np.log(1e-50) >>> prob2 = np.log(2.5e-50) >>> prob12 = np.logaddexp(prob1, prob2) >>> prob12 -113.87649168120691 >>> np.exp(prob12) 3.5000000000000057e-50 """) add_newdoc('numpy.core.umath', 'logaddexp2', """ Logarithm of the sum of exponentiations of the inputs in base-2. Calculates ``log2(2**x1 + 2**x2)``. This function is useful in machine learning when the calculated probabilities of events may be so small as to exceed the range of normal floating point numbers. 
In such cases the base-2 logarithm of the calculated probability can be used instead. This function allows adding probabilities stored in such a fashion. Parameters ---------- x1, x2 : array_like Input values. $BROADCASTABLE_2 $PARAMS Returns ------- result : ndarray Base-2 logarithm of ``2**x1 + 2**x2``. $OUT_SCALAR_2 See Also -------- logaddexp: Logarithm of the sum of exponentiations of the inputs. Notes ----- .. versionadded:: 1.3.0 Examples -------- >>> prob1 = np.log2(1e-50) >>> prob2 = np.log2(2.5e-50) >>> prob12 = np.logaddexp2(prob1, prob2) >>> prob1, prob2, prob12 (-166.09640474436813, -164.77447664948076, -164.28904982231052) >>> 2**prob12 3.4999999999999914e-50 """) add_newdoc('numpy.core.umath', 'log1p', """ Return the natural logarithm of one plus the input array, element-wise. Calculates ``log(1 + x)``. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- y : ndarray Natural logarithm of `1 + x`, element-wise. $OUT_SCALAR_1 See Also -------- expm1 : ``exp(x) - 1``, the inverse of `log1p`. Notes ----- For real-valued input, `log1p` is accurate also for `x` so small that `1 + x == 1` in floating-point accuracy. Logarithm is a multivalued function: for each `x` there is an infinite number of `z` such that `exp(z) = 1 + x`. The convention is to return the `z` whose imaginary part lies in `[-pi, pi]`. For real-valued input data types, `log1p` always returns real output. For each value that cannot be expressed as a real number or infinity, it yields ``nan`` and sets the `invalid` floating point error flag. For complex-valued input, `log1p` is a complex analytical function that has a branch cut `[-inf, -1]` and is continuous from above on it. `log1p` handles the floating-point negative zero as an infinitesimal negative number, conforming to the C99 standard. References ---------- .. [1] M. Abramowitz and I.A. Stegun, "Handbook of Mathematical Functions", 10th printing, 1964, pp. 67. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Logarithm". https://en.wikipedia.org/wiki/Logarithm Examples -------- >>> np.log1p(1e-99) 1e-99 >>> np.log(1 + 1e-99) 0.0 """) add_newdoc('numpy.core.umath', 'logical_and', """ Compute the truth value of x1 AND x2 element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or bool Boolean result of the logical AND operation applied to the elements of `x1` and `x2`; the shape is determined by broadcasting. $OUT_SCALAR_2 See Also -------- logical_or, logical_not, logical_xor bitwise_and Examples -------- >>> np.logical_and(True, False) False >>> np.logical_and([True, False], [False, False]) array([False, False]) >>> x = np.arange(5) >>> np.logical_and(x>1, x<4) array([False, False, True, True, False]) The ``&`` operator can be used as a shorthand for ``np.logical_and`` on boolean ndarrays. >>> a = np.array([True, False]) >>> b = np.array([False, False]) >>> a & b array([False, False]) """) add_newdoc('numpy.core.umath', 'logical_not', """ Compute the truth value of NOT x element-wise. Parameters ---------- x : array_like Logical NOT is applied to the elements of `x`. $PARAMS Returns ------- y : bool or ndarray of bool Boolean result with the same shape as `x` of the NOT operation on elements of `x`. 
$OUT_SCALAR_1 See Also -------- logical_and, logical_or, logical_xor Examples -------- >>> np.logical_not(3) False >>> np.logical_not([True, False, 0, 1]) array([False, True, True, False]) >>> x = np.arange(5) >>> np.logical_not(x<3) array([False, False, False, True, True]) """) add_newdoc('numpy.core.umath', 'logical_or', """ Compute the truth value of x1 OR x2 element-wise. Parameters ---------- x1, x2 : array_like Logical OR is applied to the elements of `x1` and `x2`. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or bool Boolean result of the logical OR operation applied to the elements of `x1` and `x2`; the shape is determined by broadcasting. $OUT_SCALAR_2 See Also -------- logical_and, logical_not, logical_xor bitwise_or Examples -------- >>> np.logical_or(True, False) True >>> np.logical_or([True, False], [False, False]) array([ True, False]) >>> x = np.arange(5) >>> np.logical_or(x < 1, x > 3) array([ True, False, False, False, True]) The ``|`` operator can be used as a shorthand for ``np.logical_or`` on boolean ndarrays. >>> a = np.array([True, False]) >>> b = np.array([False, False]) >>> a | b array([ True, False]) """) add_newdoc('numpy.core.umath', 'logical_xor', """ Compute the truth value of x1 XOR x2, element-wise. Parameters ---------- x1, x2 : array_like Logical XOR is applied to the elements of `x1` and `x2`. $BROADCASTABLE_2 $PARAMS Returns ------- y : bool or ndarray of bool Boolean result of the logical XOR operation applied to the elements of `x1` and `x2`; the shape is determined by broadcasting. $OUT_SCALAR_2 See Also -------- logical_and, logical_or, logical_not, bitwise_xor Examples -------- >>> np.logical_xor(True, False) True >>> np.logical_xor([True, True, False, False], [True, False, True, False]) array([False, True, True, False]) >>> x = np.arange(5) >>> np.logical_xor(x < 1, x > 3) array([ True, False, False, False, True]) Simple example showing support of broadcasting >>> np.logical_xor(0, np.eye(2)) array([[ True, False], [False, True]]) """) add_newdoc('numpy.core.umath', 'maximum', """ Element-wise maximum of array elements. Compare two arrays and returns a new array containing the element-wise maxima. If one of the elements being compared is a NaN, then that element is returned. If both elements are NaNs then the first is returned. The latter distinction is important for complex NaNs, which are defined as at least one of the real or imaginary parts being a NaN. The net effect is that NaNs are propagated. Parameters ---------- x1, x2 : array_like The arrays holding the elements to be compared. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The maximum of `x1` and `x2`, element-wise. $OUT_SCALAR_2 See Also -------- minimum : Element-wise minimum of two arrays, propagates NaNs. fmax : Element-wise maximum of two arrays, ignores NaNs. amax : The maximum value of an array along a given axis, propagates NaNs. nanmax : The maximum value of an array along a given axis, ignores NaNs. fmin, amin, nanmin Notes ----- The maximum is equivalent to ``np.where(x1 >= x2, x1, x2)`` when neither x1 nor x2 are nans, but it is faster and does proper broadcasting. Examples -------- >>> np.maximum([2, 3, 4], [1, 5, 2]) array([2, 5, 4]) >>> np.maximum(np.eye(2), [0.5, 2]) # broadcasting array([[ 1. , 2. ], [ 0.5, 2. ]]) >>> np.maximum([np.nan, 0, np.nan], [0, np.nan, np.nan]) array([nan, nan, nan]) >>> np.maximum(np.Inf, 1) inf """) add_newdoc('numpy.core.umath', 'minimum', """ Element-wise minimum of array elements. 
Compare two arrays and returns a new array containing the element-wise minima. If one of the elements being compared is a NaN, then that element is returned. If both elements are NaNs then the first is returned. The latter distinction is important for complex NaNs, which are defined as at least one of the real or imaginary parts being a NaN. The net effect is that NaNs are propagated. Parameters ---------- x1, x2 : array_like The arrays holding the elements to be compared. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The minimum of `x1` and `x2`, element-wise. $OUT_SCALAR_2 See Also -------- maximum : Element-wise maximum of two arrays, propagates NaNs. fmin : Element-wise minimum of two arrays, ignores NaNs. amin : The minimum value of an array along a given axis, propagates NaNs. nanmin : The minimum value of an array along a given axis, ignores NaNs. fmax, amax, nanmax Notes ----- The minimum is equivalent to ``np.where(x1 <= x2, x1, x2)`` when neither x1 nor x2 are NaNs, but it is faster and does proper broadcasting. Examples -------- >>> np.minimum([2, 3, 4], [1, 5, 2]) array([1, 3, 2]) >>> np.minimum(np.eye(2), [0.5, 2]) # broadcasting array([[ 0.5, 0. ], [ 0. , 1. ]]) >>> np.minimum([np.nan, 0, np.nan],[0, np.nan, np.nan]) array([nan, nan, nan]) >>> np.minimum(-np.Inf, 1) -inf """) add_newdoc('numpy.core.umath', 'fmax', """ Element-wise maximum of array elements. Compare two arrays and returns a new array containing the element-wise maxima. If one of the elements being compared is a NaN, then the non-nan element is returned. If both elements are NaNs then the first is returned. The latter distinction is important for complex NaNs, which are defined as at least one of the real or imaginary parts being a NaN. The net effect is that NaNs are ignored when possible. Parameters ---------- x1, x2 : array_like The arrays holding the elements to be compared. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The maximum of `x1` and `x2`, element-wise. $OUT_SCALAR_2 See Also -------- fmin : Element-wise minimum of two arrays, ignores NaNs. maximum : Element-wise maximum of two arrays, propagates NaNs. amax : The maximum value of an array along a given axis, propagates NaNs. nanmax : The maximum value of an array along a given axis, ignores NaNs. minimum, amin, nanmin Notes ----- .. versionadded:: 1.3.0 The fmax is equivalent to ``np.where(x1 >= x2, x1, x2)`` when neither x1 nor x2 are NaNs, but it is faster and does proper broadcasting. Examples -------- >>> np.fmax([2, 3, 4], [1, 5, 2]) array([ 2., 5., 4.]) >>> np.fmax(np.eye(2), [0.5, 2]) array([[ 1. , 2. ], [ 0.5, 2. ]]) >>> np.fmax([np.nan, 0, np.nan],[0, np.nan, np.nan]) array([ 0., 0., nan]) """) add_newdoc('numpy.core.umath', 'fmin', """ Element-wise minimum of array elements. Compare two arrays and returns a new array containing the element-wise minima. If one of the elements being compared is a NaN, then the non-nan element is returned. If both elements are NaNs then the first is returned. The latter distinction is important for complex NaNs, which are defined as at least one of the real or imaginary parts being a NaN. The net effect is that NaNs are ignored when possible. Parameters ---------- x1, x2 : array_like The arrays holding the elements to be compared. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The minimum of `x1` and `x2`, element-wise. $OUT_SCALAR_2 See Also -------- fmax : Element-wise maximum of two arrays, ignores NaNs. 
minimum : Element-wise minimum of two arrays, propagates NaNs. amin : The minimum value of an array along a given axis, propagates NaNs. nanmin : The minimum value of an array along a given axis, ignores NaNs. maximum, amax, nanmax Notes ----- .. versionadded:: 1.3.0 The fmin is equivalent to ``np.where(x1 <= x2, x1, x2)`` when neither x1 nor x2 are NaNs, but it is faster and does proper broadcasting. Examples -------- >>> np.fmin([2, 3, 4], [1, 5, 2]) array([1, 3, 2]) >>> np.fmin(np.eye(2), [0.5, 2]) array([[ 0.5, 0. ], [ 0. , 1. ]]) >>> np.fmin([np.nan, 0, np.nan],[0, np.nan, np.nan]) array([ 0., 0., nan]) """) add_newdoc('numpy.core.umath', 'clip', """ Clip (limit) the values in an array. Given an interval, values outside the interval are clipped to the interval edges. For example, if an interval of ``[0, 1]`` is specified, values smaller than 0 become 0, and values larger than 1 become 1. Equivalent to but faster than ``np.minimum(np.maximum(a, a_min), a_max)``. Parameters ---------- a : array_like Array containing elements to clip. a_min : array_like Minimum value. a_max : array_like Maximum value. out : ndarray, optional The results will be placed in this array. It may be the input array for in-place clipping. `out` must be of the right shape to hold the output. Its type is preserved. $PARAMS See Also -------- numpy.clip : Wrapper that makes the ``a_min`` and ``a_max`` arguments optional, dispatching to one of `~numpy.core.umath.clip`, `~numpy.core.umath.minimum`, and `~numpy.core.umath.maximum`. Returns ------- clipped_array : ndarray An array with the elements of `a`, but where values < `a_min` are replaced with `a_min`, and those > `a_max` with `a_max`. """) add_newdoc('numpy.core.umath', 'matmul', """ Matrix product of two arrays. Parameters ---------- x1, x2 : array_like Input arrays, scalars not allowed. out : ndarray, optional A location into which the result is stored. If provided, it must have a shape that matches the signature `(n,k),(k,m)->(n,m)`. If not provided or None, a freshly-allocated array is returned. **kwargs For other keyword-only arguments, see the :ref:`ufunc docs <ufuncs.kwargs>`. .. versionadded:: 1.16 Now handles ufunc kwargs Returns ------- y : ndarray The matrix product of the inputs. This is a scalar only when both x1, x2 are 1-d vectors. Raises ------ ValueError If the last dimension of `x1` is not the same size as the second-to-last dimension of `x2`. If a scalar value is passed in. See Also -------- vdot : Complex-conjugating dot product. tensordot : Sum products over arbitrary axes. einsum : Einstein summation convention. dot : alternative matrix product with different broadcasting rules. Notes ----- The behavior depends on the arguments in the following way. - If both arguments are 2-D they are multiplied like conventional matrices. - If either argument is N-D, N > 2, it is treated as a stack of matrices residing in the last two indexes and broadcast accordingly. - If the first argument is 1-D, it is promoted to a matrix by prepending a 1 to its dimensions. After matrix multiplication the prepended 1 is removed. - If the second argument is 1-D, it is promoted to a matrix by appending a 1 to its dimensions. After matrix multiplication the appended 1 is removed. ``matmul`` differs from ``dot`` in two important ways: - Multiplication by scalars is not allowed, use ``*`` instead. 
- Stacks of matrices are broadcast together as if the matrices were elements, respecting the signature ``(n,k),(k,m)->(n,m)``: >>> a = np.ones([9, 5, 7, 4]) >>> c = np.ones([9, 5, 4, 3]) >>> np.dot(a, c).shape (9, 5, 7, 9, 5, 3) >>> np.matmul(a, c).shape (9, 5, 7, 3) >>> # n is 7, k is 4, m is 3 The matmul function implements the semantics of the ``@`` operator introduced in Python 3.5 following :pep:`465`. Examples -------- For 2-D arrays it is the matrix product: >>> a = np.array([[1, 0], ... [0, 1]]) >>> b = np.array([[4, 1], ... [2, 2]]) >>> np.matmul(a, b) array([[4, 1], [2, 2]]) For 2-D mixed with 1-D, the result is the usual. >>> a = np.array([[1, 0], ... [0, 1]]) >>> b = np.array([1, 2]) >>> np.matmul(a, b) array([1, 2]) >>> np.matmul(b, a) array([1, 2]) Broadcasting is conventional for stacks of arrays >>> a = np.arange(2 * 2 * 4).reshape((2, 2, 4)) >>> b = np.arange(2 * 2 * 4).reshape((2, 4, 2)) >>> np.matmul(a,b).shape (2, 2, 2) >>> np.matmul(a, b)[0, 1, 1] 98 >>> sum(a[0, 1, :] * b[0 , :, 1]) 98 Vector, vector returns the scalar inner product, but neither argument is complex-conjugated: >>> np.matmul([2j, 3j], [2j, 3j]) (-13+0j) Scalar multiplication raises an error. >>> np.matmul([1,2], 3) Traceback (most recent call last): ... ValueError: matmul: Input operand 1 does not have enough dimensions ... The ``@`` operator can be used as a shorthand for ``np.matmul`` on ndarrays. >>> x1 = np.array([2j, 3j]) >>> x2 = np.array([2j, 3j]) >>> x1 @ x2 (-13+0j) .. versionadded:: 1.10.0 """) add_newdoc('numpy.core.umath', 'modf', """ Return the fractional and integral parts of an array, element-wise. The fractional and integral parts are negative if the given number is negative. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y1 : ndarray Fractional part of `x`. $OUT_SCALAR_1 y2 : ndarray Integral part of `x`. $OUT_SCALAR_1 Notes ----- For integer input the return values are floats. See Also -------- divmod : ``divmod(x, 1)`` is equivalent to ``modf`` with the return values switched, except it always has a positive remainder. Examples -------- >>> np.modf([0, 3.5]) (array([ 0. , 0.5]), array([ 0., 3.])) >>> np.modf(-0.5) (-0.5, -0) """) add_newdoc('numpy.core.umath', 'multiply', """ Multiply arguments element-wise. Parameters ---------- x1, x2 : array_like Input arrays to be multiplied. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray The product of `x1` and `x2`, element-wise. $OUT_SCALAR_2 Notes ----- Equivalent to `x1` * `x2` in terms of array broadcasting. Examples -------- >>> np.multiply(2.0, 4.0) 8.0 >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> np.multiply(x1, x2) array([[ 0., 1., 4.], [ 0., 4., 10.], [ 0., 7., 16.]]) The ``*`` operator can be used as a shorthand for ``np.multiply`` on ndarrays. >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> x1 * x2 array([[ 0., 1., 4.], [ 0., 4., 10.], [ 0., 7., 16.]]) """) add_newdoc('numpy.core.umath', 'negative', """ Numerical negative, element-wise. Parameters ---------- x : array_like or scalar Input array. $PARAMS Returns ------- y : ndarray or scalar Returned array or scalar: `y = -x`. $OUT_SCALAR_1 Examples -------- >>> np.negative([1.,-1.]) array([-1., 1.]) The unary ``-`` operator can be used as a shorthand for ``np.negative`` on ndarrays. >>> x1 = np.array(([1., -1.])) >>> -x1 array([-1., 1.]) """) add_newdoc('numpy.core.umath', 'positive', """ Numerical positive, element-wise. .. versionadded:: 1.13.0 Parameters ---------- x : array_like or scalar Input array. 
Returns ------- y : ndarray or scalar Returned array or scalar: `y = +x`. $OUT_SCALAR_1 Notes ----- Equivalent to `x.copy()`, but only defined for types that support arithmetic. Examples -------- >>> x1 = np.array(([1., -1.])) >>> np.positive(x1) array([ 1., -1.]) The unary ``+`` operator can be used as a shorthand for ``np.positive`` on ndarrays. >>> x1 = np.array(([1., -1.])) >>> +x1 array([ 1., -1.]) """) add_newdoc('numpy.core.umath', 'not_equal', """ Return (x1 != x2) element-wise. Parameters ---------- x1, x2 : array_like Input arrays. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar Output array, element-wise comparison of `x1` and `x2`. Typically of type bool, unless ``dtype=object`` is passed. $OUT_SCALAR_2 See Also -------- equal, greater, greater_equal, less, less_equal Examples -------- >>> np.not_equal([1.,2.], [1., 3.]) array([False, True]) >>> np.not_equal([1, 2], [[1, 3],[1, 4]]) array([[False, True], [False, True]]) The ``!=`` operator can be used as a shorthand for ``np.not_equal`` on ndarrays. >>> a = np.array([1., 2.]) >>> b = np.array([1., 3.]) >>> a != b array([False, True]) """) add_newdoc('numpy.core.umath', '_ones_like', """ This function used to be the numpy.ones_like, but now a specific function for that has been written for consistency with the other *_like functions. It is only used internally in a limited fashion now. See Also -------- ones_like """) add_newdoc('numpy.core.umath', 'power', """ First array elements raised to powers from second array, element-wise. Raise each base in `x1` to the positionally-corresponding power in `x2`. `x1` and `x2` must be broadcastable to the same shape. Note that an integer type raised to a negative integer power will raise a ValueError. Parameters ---------- x1 : array_like The bases. x2 : array_like The exponents. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray The bases in `x1` raised to the exponents in `x2`. $OUT_SCALAR_2 See Also -------- float_power : power function that promotes integers to float Examples -------- Cube each element in an array. >>> x1 = np.arange(6) >>> x1 [0, 1, 2, 3, 4, 5] >>> np.power(x1, 3) array([ 0, 1, 8, 27, 64, 125]) Raise the bases to different exponents. >>> x2 = [1.0, 2.0, 3.0, 3.0, 2.0, 1.0] >>> np.power(x1, x2) array([ 0., 1., 8., 27., 16., 5.]) The effect of broadcasting. >>> x2 = np.array([[1, 2, 3, 3, 2, 1], [1, 2, 3, 3, 2, 1]]) >>> x2 array([[1, 2, 3, 3, 2, 1], [1, 2, 3, 3, 2, 1]]) >>> np.power(x1, x2) array([[ 0, 1, 8, 27, 16, 5], [ 0, 1, 8, 27, 16, 5]]) The ``**`` operator can be used as a shorthand for ``np.power`` on ndarrays. >>> x2 = np.array([1, 2, 3, 3, 2, 1]) >>> x1 = np.arange(6) >>> x1 ** x2 array([ 0, 1, 8, 27, 16, 5]) """) add_newdoc('numpy.core.umath', 'float_power', """ First array elements raised to powers from second array, element-wise. Raise each base in `x1` to the positionally-corresponding power in `x2`. `x1` and `x2` must be broadcastable to the same shape. This differs from the power function in that integers, float16, and float32 are promoted to floats with a minimum precision of float64 so that the result is always inexact. The intent is that the function will return a usable result for negative powers and seldom overflow for positive powers. .. versionadded:: 1.12.0 Parameters ---------- x1 : array_like The bases. x2 : array_like The exponents. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray The bases in `x1` raised to the exponents in `x2`. 
$OUT_SCALAR_2 See Also -------- power : power function that preserves type Examples -------- Cube each element in a list. >>> x1 = range(6) >>> x1 [0, 1, 2, 3, 4, 5] >>> np.float_power(x1, 3) array([ 0., 1., 8., 27., 64., 125.]) Raise the bases to different exponents. >>> x2 = [1.0, 2.0, 3.0, 3.0, 2.0, 1.0] >>> np.float_power(x1, x2) array([ 0., 1., 8., 27., 16., 5.]) The effect of broadcasting. >>> x2 = np.array([[1, 2, 3, 3, 2, 1], [1, 2, 3, 3, 2, 1]]) >>> x2 array([[1, 2, 3, 3, 2, 1], [1, 2, 3, 3, 2, 1]]) >>> np.float_power(x1, x2) array([[ 0., 1., 8., 27., 16., 5.], [ 0., 1., 8., 27., 16., 5.]]) """) add_newdoc('numpy.core.umath', 'radians', """ Convert angles from degrees to radians. Parameters ---------- x : array_like Input array in degrees. $PARAMS Returns ------- y : ndarray The corresponding radian values. $OUT_SCALAR_1 See Also -------- deg2rad : equivalent function Examples -------- Convert a degree array to radians >>> deg = np.arange(12.) * 30. >>> np.radians(deg) array([ 0. , 0.52359878, 1.04719755, 1.57079633, 2.0943951 , 2.61799388, 3.14159265, 3.66519143, 4.1887902 , 4.71238898, 5.23598776, 5.75958653]) >>> out = np.zeros((deg.shape)) >>> ret = np.radians(deg, out) >>> ret is out True """) add_newdoc('numpy.core.umath', 'deg2rad', """ Convert angles from degrees to radians. Parameters ---------- x : array_like Angles in degrees. $PARAMS Returns ------- y : ndarray The corresponding angle in radians. $OUT_SCALAR_1 See Also -------- rad2deg : Convert angles from radians to degrees. unwrap : Remove large jumps in angle by wrapping. Notes ----- .. versionadded:: 1.3.0 ``deg2rad(x)`` is ``x * pi / 180``. Examples -------- >>> np.deg2rad(180) 3.1415926535897931 """) add_newdoc('numpy.core.umath', 'reciprocal', """ Return the reciprocal of the argument, element-wise. Calculates ``1/x``. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y : ndarray Return array. $OUT_SCALAR_1 Notes ----- .. note:: This function is not designed to work with integers. For integer arguments with absolute value larger than 1 the result is always zero because of the way Python handles integer division. For integer zero the result is an overflow. Examples -------- >>> np.reciprocal(2.) 0.5 >>> np.reciprocal([1, 2., 3.33]) array([ 1. , 0.5 , 0.3003003]) """) add_newdoc('numpy.core.umath', 'remainder', """ Return element-wise remainder of division. Computes the remainder complementary to the `floor_divide` function. It is equivalent to the Python modulus operator ``x1 % x2`` and has the same sign as the divisor `x2`. The MATLAB function equivalent to ``np.remainder`` is ``mod``. .. warning:: This should not be confused with: * Python 3.7's `math.remainder` and C's ``remainder``, which compute the IEEE remainder, which is the complement to ``round(x1 / x2)``. * The MATLAB ``rem`` function and the C ``%`` operator, which are the complement to ``int(x1 / x2)``. Parameters ---------- x1 : array_like Dividend array. x2 : array_like Divisor array. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray The element-wise remainder of the quotient ``floor_divide(x1, x2)``. $OUT_SCALAR_2 See Also -------- floor_divide : Equivalent of Python ``//`` operator. divmod : Simultaneous floor division and remainder. fmod : Equivalent of the MATLAB ``rem`` function. divide, floor Notes ----- Returns 0 when `x2` is 0 and both `x1` and `x2` are (arrays of) integers. ``mod`` is an alias of ``remainder``.
Examples -------- >>> np.remainder([4, 7], [2, 3]) array([0, 1]) >>> np.remainder(np.arange(7), 5) array([0, 1, 2, 3, 4, 0, 1]) The ``%`` operator can be used as a shorthand for ``np.remainder`` on ndarrays. >>> x1 = np.arange(7) >>> x1 % 5 array([0, 1, 2, 3, 4, 0, 1]) """) add_newdoc('numpy.core.umath', 'divmod', """ Return element-wise quotient and remainder simultaneously. .. versionadded:: 1.13.0 ``np.divmod(x, y)`` is equivalent to ``(x // y, x % y)``, but faster because it avoids redundant work. It is used to implement the Python built-in function ``divmod`` on NumPy arrays. Parameters ---------- x1 : array_like Dividend array. x2 : array_like Divisor array. $BROADCASTABLE_2 $PARAMS Returns ------- out1 : ndarray Element-wise quotient resulting from floor division. $OUT_SCALAR_2 out2 : ndarray Element-wise remainder from floor division. $OUT_SCALAR_2 See Also -------- floor_divide : Equivalent to Python's ``//`` operator. remainder : Equivalent to Python's ``%`` operator. modf : Equivalent to ``divmod(x, 1)`` for positive ``x`` with the return values switched. Examples -------- >>> np.divmod(np.arange(5), 3) (array([0, 0, 0, 1, 1]), array([0, 1, 2, 0, 1])) The `divmod` function can be used as a shorthand for ``np.divmod`` on ndarrays. >>> x = np.arange(5) >>> divmod(x, 3) (array([0, 0, 0, 1, 1]), array([0, 1, 2, 0, 1])) """) add_newdoc('numpy.core.umath', 'right_shift', """ Shift the bits of an integer to the right. Bits are shifted to the right `x2`. Because the internal representation of numbers is in binary format, this operation is equivalent to dividing `x1` by ``2**x2``. Parameters ---------- x1 : array_like, int Input values. x2 : array_like, int Number of bits to remove at the right of `x1`. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray, int Return `x1` with bits shifted `x2` times to the right. $OUT_SCALAR_2 See Also -------- left_shift : Shift the bits of an integer to the left. binary_repr : Return the binary representation of the input number as a string. Examples -------- >>> np.binary_repr(10) '1010' >>> np.right_shift(10, 1) 5 >>> np.binary_repr(5) '101' >>> np.right_shift(10, [1,2,3]) array([5, 2, 1]) The ``>>`` operator can be used as a shorthand for ``np.right_shift`` on ndarrays. >>> x1 = 10 >>> x2 = np.array([1,2,3]) >>> x1 >> x2 array([5, 2, 1]) """) add_newdoc('numpy.core.umath', 'rint', """ Round elements of the array to the nearest integer. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- out : ndarray or scalar Output array is same shape and type as `x`. $OUT_SCALAR_1 See Also -------- fix, ceil, floor, trunc Notes ----- For values exactly halfway between rounded decimal values, NumPy rounds to the nearest even value. Thus 1.5 and 2.5 round to 2.0, -0.5 and 0.5 round to 0.0, etc. Examples -------- >>> a = np.array([-1.7, -1.5, -0.2, 0.2, 1.5, 1.7, 2.0]) >>> np.rint(a) array([-2., -2., -0., 0., 2., 2., 2.]) """) add_newdoc('numpy.core.umath', 'sign', """ Returns an element-wise indication of the sign of a number. The `sign` function returns ``-1 if x < 0, 0 if x==0, 1 if x > 0``. nan is returned for nan inputs. For complex inputs, the `sign` function returns ``sign(x.real) + 0j if x.real != 0 else sign(x.imag) + 0j``. complex(nan, 0) is returned for complex nan inputs. Parameters ---------- x : array_like Input values. $PARAMS Returns ------- y : ndarray The sign of `x`. $OUT_SCALAR_1 Notes ----- There is more than one definition of sign in common use for complex numbers. 
The definition used here is equivalent to :math:`x/\\sqrt{x*x}` which is different from a common alternative, :math:`x/|x|`. Examples -------- >>> np.sign([-5., 4.5]) array([-1., 1.]) >>> np.sign(0) 0 >>> np.sign(5-2j) (1+0j) """) add_newdoc('numpy.core.umath', 'signbit', """ Returns element-wise True where signbit is set (less than zero). Parameters ---------- x : array_like The input value(s). $PARAMS Returns ------- result : ndarray of bool Output array, or reference to `out` if that was supplied. $OUT_SCALAR_1 Examples -------- >>> np.signbit(-1.2) True >>> np.signbit(np.array([1, -2.3, 2.1])) array([False, True, False]) """) add_newdoc('numpy.core.umath', 'copysign', """ Change the sign of x1 to that of x2, element-wise. If `x2` is a scalar, its sign will be copied to all elements of `x1`. Parameters ---------- x1 : array_like Values to change the sign of. x2 : array_like The sign of `x2` is copied to `x1`. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar The values of `x1` with the sign of `x2`. $OUT_SCALAR_2 Examples -------- >>> np.copysign(1.3, -1) -1.3 >>> 1/np.copysign(0, 1) inf >>> 1/np.copysign(0, -1) -inf >>> np.copysign([-1, 0, 1], -1.1) array([-1., -0., -1.]) >>> np.copysign([-1, 0, 1], np.arange(3)-1) array([-1., 0., 1.]) """) add_newdoc('numpy.core.umath', 'nextafter', """ Return the next floating-point value after x1 towards x2, element-wise. Parameters ---------- x1 : array_like Values to find the next representable value of. x2 : array_like The direction where to look for the next representable value of `x1`. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar The next representable values of `x1` in the direction of `x2`. $OUT_SCALAR_2 Examples -------- >>> eps = np.finfo(np.float64).eps >>> np.nextafter(1, 2) == eps + 1 True >>> np.nextafter([1, 2], [2, 1]) == [eps + 1, 2 - eps] array([ True, True]) """) add_newdoc('numpy.core.umath', 'spacing', """ Return the distance between x and the nearest adjacent number. Parameters ---------- x : array_like Values to find the spacing of. $PARAMS Returns ------- out : ndarray or scalar The spacing of values of `x`. $OUT_SCALAR_1 Notes ----- It can be considered as a generalization of EPS: ``spacing(np.float64(1)) == np.finfo(np.float64).eps``, and there should not be any representable number between ``x + spacing(x)`` and x for any finite x. Spacing of +- inf and NaN is NaN. Examples -------- >>> np.spacing(1) == np.finfo(np.float64).eps True """) add_newdoc('numpy.core.umath', 'sin', """ Trigonometric sine, element-wise. Parameters ---------- x : array_like Angle, in radians (:math:`2 \\pi` rad equals 360 degrees). $PARAMS Returns ------- y : array_like The sine of each element of x. $OUT_SCALAR_1 See Also -------- arcsin, sinh, cos Notes ----- The sine is one of the fundamental functions of trigonometry (the mathematical study of triangles). Consider a circle of radius 1 centered on the origin. A ray comes in from the :math:`+x` axis, makes an angle at the origin (measured counter-clockwise from that axis), and departs from the origin. The :math:`y` coordinate of the outgoing ray's intersection with the unit circle is the sine of that angle. It ranges from -1 for :math:`x=3\\pi / 2` to +1 for :math:`\\pi / 2.` The function has zeroes where the angle is a multiple of :math:`\\pi`. Sines of angles between :math:`\\pi` and :math:`2\\pi` are negative. The numerous properties of the sine and related functions are included in any standard trigonometry text. 
Examples -------- Print sine of one angle: >>> np.sin(np.pi/2.) 1.0 Print sines of an array of angles given in degrees: >>> np.sin(np.array((0., 30., 45., 60., 90.)) * np.pi / 180. ) array([ 0. , 0.5 , 0.70710678, 0.8660254 , 1. ]) Plot the sine function: >>> import matplotlib.pylab as plt >>> x = np.linspace(-np.pi, np.pi, 201) >>> plt.plot(x, np.sin(x)) >>> plt.xlabel('Angle [rad]') >>> plt.ylabel('sin(x)') >>> plt.axis('tight') >>> plt.show() """) add_newdoc('numpy.core.umath', 'sinh', """ Hyperbolic sine, element-wise. Equivalent to ``1/2 * (np.exp(x) - np.exp(-x))`` or ``-1j * np.sin(1j*x)``. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y : ndarray The corresponding hyperbolic sine values. $OUT_SCALAR_1 Notes ----- If `out` is provided, the function writes the result into it, and returns a reference to `out`. (See Examples) References ---------- M. Abramowitz and I. A. Stegun, Handbook of Mathematical Functions. New York, NY: Dover, 1972, pg. 83. Examples -------- >>> np.sinh(0) 0.0 >>> np.sinh(np.pi*1j/2) 1j >>> np.sinh(np.pi*1j) # (exact value is 0) 1.2246063538223773e-016j >>> # Discrepancy due to vagaries of floating point arithmetic. >>> # Example of providing the optional output parameter >>> out1 = np.array([0], dtype='d') >>> out2 = np.sinh([0.1], out1) >>> out2 is out1 True >>> # Example of ValueError due to provision of shape mis-matched `out` >>> np.sinh(np.zeros((3,3)),np.zeros((2,2))) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: operands could not be broadcast together with shapes (3,3) (2,2) """) add_newdoc('numpy.core.umath', 'sqrt', """ Return the non-negative square-root of an array, element-wise. Parameters ---------- x : array_like The values whose square-roots are required. $PARAMS Returns ------- y : ndarray An array of the same shape as `x`, containing the positive square-root of each element in `x`. If any element in `x` is complex, a complex array is returned (and the square-roots of negative reals are calculated). If all of the elements in `x` are real, so is `y`, with negative elements returning ``nan``. If `out` was provided, `y` is a reference to it. $OUT_SCALAR_1 See Also -------- lib.scimath.sqrt A version which returns complex numbers when given negative reals. Notes ----- *sqrt* has--consistent with common convention--as its branch cut the real "interval" [`-inf`, 0), and is continuous from above on it. A branch cut is a curve in the complex plane across which a given complex function fails to be continuous. Examples -------- >>> np.sqrt([1,4,9]) array([ 1., 2., 3.]) >>> np.sqrt([4, -1, -3+4J]) array([ 2.+0.j, 0.+1.j, 1.+2.j]) >>> np.sqrt([4, -1, np.inf]) array([ 2., nan, inf]) """) add_newdoc('numpy.core.umath', 'cbrt', """ Return the cube-root of an array, element-wise. .. versionadded:: 1.10.0 Parameters ---------- x : array_like The values whose cube-roots are required. $PARAMS Returns ------- y : ndarray An array of the same shape as `x`, containing the cube cube-root of each element in `x`. If `out` was provided, `y` is a reference to it. $OUT_SCALAR_1 Examples -------- >>> np.cbrt([1,8,27]) array([ 1., 2., 3.]) """) add_newdoc('numpy.core.umath', 'square', """ Return the element-wise square of the input. Parameters ---------- x : array_like Input data. $PARAMS Returns ------- out : ndarray or scalar Element-wise `x*x`, of the same shape and dtype as `x`. 
$OUT_SCALAR_1 See Also -------- numpy.linalg.matrix_power sqrt power Examples -------- >>> np.square([-1j, 1]) array([-1.-0.j, 1.+0.j]) """) add_newdoc('numpy.core.umath', 'subtract', """ Subtract arguments, element-wise. Parameters ---------- x1, x2 : array_like The arrays to be subtracted from each other. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray The difference of `x1` and `x2`, element-wise. $OUT_SCALAR_2 Notes ----- Equivalent to ``x1 - x2`` in terms of array broadcasting. Examples -------- >>> np.subtract(1.0, 4.0) -3.0 >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> np.subtract(x1, x2) array([[ 0., 0., 0.], [ 3., 3., 3.], [ 6., 6., 6.]]) The ``-`` operator can be used as a shorthand for ``np.subtract`` on ndarrays. >>> x1 = np.arange(9.0).reshape((3, 3)) >>> x2 = np.arange(3.0) >>> x1 - x2 array([[0., 0., 0.], [3., 3., 3.], [6., 6., 6.]]) """) add_newdoc('numpy.core.umath', 'tan', """ Compute tangent element-wise. Equivalent to ``np.sin(x)/np.cos(x)`` element-wise. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y : ndarray The corresponding tangent values. $OUT_SCALAR_1 Notes ----- If `out` is provided, the function writes the result into it, and returns a reference to `out`. (See Examples) References ---------- M. Abramowitz and I. A. Stegun, Handbook of Mathematical Functions. New York, NY: Dover, 1972. Examples -------- >>> from math import pi >>> np.tan(np.array([-pi,pi/2,pi])) array([ 1.22460635e-16, 1.63317787e+16, -1.22460635e-16]) >>> >>> # Example of providing the optional output parameter illustrating >>> # that what is returned is a reference to said parameter >>> out1 = np.array([0], dtype='d') >>> out2 = np.cos([0.1], out1) >>> out2 is out1 True >>> >>> # Example of ValueError due to provision of shape mis-matched `out` >>> np.cos(np.zeros((3,3)),np.zeros((2,2))) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: operands could not be broadcast together with shapes (3,3) (2,2) """) add_newdoc('numpy.core.umath', 'tanh', """ Compute hyperbolic tangent element-wise. Equivalent to ``np.sinh(x)/np.cosh(x)`` or ``-1j * np.tan(1j*x)``. Parameters ---------- x : array_like Input array. $PARAMS Returns ------- y : ndarray The corresponding hyperbolic tangent values. $OUT_SCALAR_1 Notes ----- If `out` is provided, the function writes the result into it, and returns a reference to `out`. (See Examples) References ---------- .. [1] M. Abramowitz and I. A. Stegun, Handbook of Mathematical Functions. New York, NY: Dover, 1972, pg. 83. http://www.math.sfu.ca/~cbm/aands/ .. [2] Wikipedia, "Hyperbolic function", https://en.wikipedia.org/wiki/Hyperbolic_function Examples -------- >>> np.tanh((0, np.pi*1j, np.pi*1j/2)) array([ 0. +0.00000000e+00j, 0. -1.22460635e-16j, 0. +1.63317787e+16j]) >>> # Example of providing the optional output parameter illustrating >>> # that what is returned is a reference to said parameter >>> out1 = np.array([0], dtype='d') >>> out2 = np.tanh([0.1], out1) >>> out2 is out1 True >>> # Example of ValueError due to provision of shape mis-matched `out` >>> np.tanh(np.zeros((3,3)),np.zeros((2,2))) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: operands could not be broadcast together with shapes (3,3) (2,2) """) add_newdoc('numpy.core.umath', 'true_divide', """ Returns a true division of the inputs, element-wise. Instead of the Python traditional 'floor division', this returns a true division. 
True division adjusts the output type to present the best answer, regardless of input types. Parameters ---------- x1 : array_like Dividend array. x2 : array_like Divisor array. $BROADCASTABLE_2 $PARAMS Returns ------- out : ndarray or scalar $OUT_SCALAR_2 Notes ----- In Python, ``//`` is the floor division operator and ``/`` the true division operator. The ``true_divide(x1, x2)`` function is equivalent to true division in Python. Examples -------- >>> x = np.arange(5) >>> np.true_divide(x, 4) array([ 0. , 0.25, 0.5 , 0.75, 1. ]) >>> x/4 array([ 0. , 0.25, 0.5 , 0.75, 1. ]) >>> x//4 array([0, 0, 0, 0, 1]) The ``/`` operator can be used as a shorthand for ``np.true_divide`` on ndarrays. >>> x = np.arange(5) >>> x / 4 array([0. , 0.25, 0.5 , 0.75, 1. ]) """) add_newdoc('numpy.core.umath', 'frexp', """ Decompose the elements of x into mantissa and twos exponent. Returns (`mantissa`, `exponent`), where `x = mantissa * 2**exponent``. The mantissa lies in the open interval(-1, 1), while the twos exponent is a signed integer. Parameters ---------- x : array_like Array of numbers to be decomposed. out1 : ndarray, optional Output array for the mantissa. Must have the same shape as `x`. out2 : ndarray, optional Output array for the exponent. Must have the same shape as `x`. $PARAMS Returns ------- mantissa : ndarray Floating values between -1 and 1. $OUT_SCALAR_1 exponent : ndarray Integer exponents of 2. $OUT_SCALAR_1 See Also -------- ldexp : Compute ``y = x1 * 2**x2``, the inverse of `frexp`. Notes ----- Complex dtypes are not supported, they will raise a TypeError. Examples -------- >>> x = np.arange(9) >>> y1, y2 = np.frexp(x) >>> y1 array([ 0. , 0.5 , 0.5 , 0.75 , 0.5 , 0.625, 0.75 , 0.875, 0.5 ]) >>> y2 array([0, 1, 2, 2, 3, 3, 3, 3, 4]) >>> y1 * 2**y2 array([ 0., 1., 2., 3., 4., 5., 6., 7., 8.]) """) add_newdoc('numpy.core.umath', 'ldexp', """ Returns x1 * 2**x2, element-wise. The mantissas `x1` and twos exponents `x2` are used to construct floating point numbers ``x1 * 2**x2``. Parameters ---------- x1 : array_like Array of multipliers. x2 : array_like, int Array of twos exponents. $BROADCASTABLE_2 $PARAMS Returns ------- y : ndarray or scalar The result of ``x1 * 2**x2``. $OUT_SCALAR_2 See Also -------- frexp : Return (y1, y2) from ``x = y1 * 2**y2``, inverse to `ldexp`. Notes ----- Complex dtypes are not supported, they will raise a TypeError. `ldexp` is useful as the inverse of `frexp`, if used by itself it is more clear to simply use the expression ``x1 * 2**x2``. Examples -------- >>> np.ldexp(5, np.arange(4)) array([ 5., 10., 20., 40.], dtype=float16) >>> x = np.arange(6) >>> np.ldexp(*np.frexp(x)) array([ 0., 1., 2., 3., 4., 5.]) """) add_newdoc('numpy.core.umath', 'gcd', """ Returns the greatest common divisor of ``|x1|`` and ``|x2|`` Parameters ---------- x1, x2 : array_like, int Arrays of values. $BROADCASTABLE_2 Returns ------- y : ndarray or scalar The greatest common divisor of the absolute value of the inputs $OUT_SCALAR_2 See Also -------- lcm : The lowest common multiple Examples -------- >>> np.gcd(12, 20) 4 >>> np.gcd.reduce([15, 25, 35]) 5 >>> np.gcd(np.arange(6), 20) array([20, 1, 2, 1, 4, 5]) """) add_newdoc('numpy.core.umath', 'lcm', """ Returns the lowest common multiple of ``|x1|`` and ``|x2|`` Parameters ---------- x1, x2 : array_like, int Arrays of values. 
$BROADCASTABLE_2 Returns ------- y : ndarray or scalar The lowest common multiple of the absolute value of the inputs $OUT_SCALAR_2 See Also -------- gcd : The greatest common divisor Examples -------- >>> np.lcm(12, 20) 60 >>> np.lcm.reduce([3, 12, 20]) 60 >>> np.lcm.reduce([40, 12, 20]) 120 >>> np.lcm(np.arange(6), 20) array([ 0, 20, 20, 60, 20, 20]) """)
import os import sys import pytest import numpy as np from . import util from numpy.testing import assert_array_equal def _path(*a): return os.path.join(*((os.path.dirname(__file__),) + a)) class TestCommonBlock(util.F2PyTest): sources = [_path('src', 'common', 'block.f')] @pytest.mark.skipif(sys.platform=='win32', reason='Fails with MinGW64 Gfortran (Issue #9673)') def test_common_block(self): self.module.initcb() assert_array_equal(self.module.block.long_bn, np.array(1.0, dtype=np.float64)) assert_array_equal(self.module.block.string_bn, np.array('2', dtype='|S1')) assert_array_equal(self.module.block.ok, np.array(3, dtype=np.int32))
seberg/numpy
numpy/f2py/tests/test_common.py
numpy/core/code_generators/ufunc_docstrings.py
# -*- encoding: utf-8 -*- from abjad import * def test_indicatortools_Clef_middle_c_position_01(): assert Clef('treble').middle_c_position == pitchtools.StaffPosition(-6) assert Clef('alto').middle_c_position == pitchtools.StaffPosition(0) assert Clef('tenor').middle_c_position == pitchtools.StaffPosition(2) assert Clef('bass').middle_c_position == pitchtools.StaffPosition(6) assert Clef('treble^8').middle_c_position == pitchtools.StaffPosition(-13) assert Clef('alto^15').middle_c_position == pitchtools.StaffPosition(-13) assert Clef('tenor_8').middle_c_position == pitchtools.StaffPosition(9) assert Clef('bass_15').middle_c_position == pitchtools.StaffPosition(19)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/indicatortools/test/test_indicatortools_Clef_middle_c_position.py
# -*- encoding: utf-8 -*- from abjad.tools import indicatortools from abjad.tools import pitchtools from abjad.tools import scoretools from abjad.tools.topleveltools import iterate def iterate_out_of_range_notes_and_chords(expr): '''Iterates notes and chords in `expr` outside traditional instrument ranges: :: >>> staff = Staff("c'8 r8 <d fs>8 r8") >>> violin = instrumenttools.Violin() >>> attach(violin, staff) :: >>> list( ... instrumenttools.iterate_out_of_range_notes_and_chords( ... staff)) [Chord('<d fs>8')] Returns generator. ''' from abjad.tools import instrumenttools prototype = (scoretools.Note, scoretools.Chord) for note_or_chord in iterate(expr).by_class(prototype): instrument = note_or_chord._get_effective( instrumenttools.Instrument) if instrument is None: message = 'no instrument found.' raise ValueError(message) if note_or_chord not in instrument.pitch_range: yield note_or_chord
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/instrumenttools/iterate_out_of_range_notes_and_chords.py
# -*- encoding: utf-8 -*- from abjad.tools.datastructuretools.TreeNode import TreeNode class ReSTHorizontalRule(TreeNode): r'''A ReST horizontal rule. :: >>> rule = documentationtools.ReSTHorizontalRule() >>> rule ReSTHorizontalRule() :: >>> print(rule.rest_format) -------- ''' ### CLASS VARIABLES ### __documentation_section__ = 'reStructuredText' ### PRIVATE PROPERTIES ### @property def _rest_format_contributions(self): return ['--------'] ### PUBLIC PROPERTIES ### @property def rest_format(self): r'''ReST format of ReSt horizontal rule. Returns text. ''' return '\n'.join(self._rest_format_contributions)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/documentationtools/ReSTHorizontalRule.py
# -*- encoding: utf-8 -*- from abjad import * def test_pitchtools_NumberedPitch_pitch_number_01(): assert pitchtools.NumberedPitch("cff''").pitch_number == 10 assert pitchtools.NumberedPitch("ctqf''").pitch_number == 10.5 assert pitchtools.NumberedPitch("cf''").pitch_number == 11 assert pitchtools.NumberedPitch("cqf''").pitch_number == 11.5 assert pitchtools.NumberedPitch("c''").pitch_number == 12 assert pitchtools.NumberedPitch("cqs''").pitch_number == 12.5 assert pitchtools.NumberedPitch("cs''").pitch_number == 13 assert pitchtools.NumberedPitch("ctqs''").pitch_number == 13.5 assert pitchtools.NumberedPitch("css''").pitch_number == 14 assert pitchtools.NumberedPitch("d''").pitch_number == 14
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_NumberedPitch_pitch_number.py
# -*- encoding: utf-8 -*- import functools from abjad.tools import durationtools from abjad.tools.schemetools.Scheme import Scheme @functools.total_ordering class SchemeMoment(Scheme): r'''A LilyPond scheme moment. Initializes with two integers: :: >>> moment = schemetools.SchemeMoment(1, 68) >>> moment SchemeMoment(1, 68) Scheme moments are immutable. ''' ### CLASS VARIABLES ### __slots__ = ( ) ### INITIALIZER ### def __init__(self, *args, **kwargs): if len(args) == 1 and durationtools.Duration.is_token(args[0]): args = durationtools.Duration(args[0]) elif len(args) == 1 and isinstance(args[0], type(self)): args = args[0].duration elif len(args) == 2 and \ isinstance(args[0], int) and isinstance(args[1], int): args = durationtools.Duration(args) elif len(args) == 0: args = durationtools.Duration((1, 4)) else: message = 'can not intialize {}: {!r}.' message = message.format(type(self).__name__, args) raise TypeError(message) Scheme.__init__(self, args, **kwargs) ### SPECIAL METHODS ### def __eq__(self, arg): r'''Is true when `arg` is a scheme moment with the same value as that of this scheme moment. :: >>> moment == schemetools.SchemeMoment(1, 68) True Otherwise false. >>> moment == schemetools.SchemeMoment(1, 54) False Returns boolean. ''' if isinstance(arg, type(self)): if self._value == arg._value: return True return False def __getnewargs__(self): r'''Gets new arguments. Returns tuple. ''' return (self._value,) def __hash__(self): r'''Hashes scheme moment. Required to be explicitly re-defined on Python 3 if __eq__ changes. Returns integer. ''' return super(SchemeMoment, self).__hash__() def __lt__(self, arg): r'''Is true when `arg` is a scheme moment with value greater than that of this scheme moment. :: >>> moment < schemetools.SchemeMoment(1, 32) True Otherwise false: :: >>> moment < schemetools.SchemeMoment(1, 78) False Returns boolean. ''' if isinstance(arg, type(self)): if self._value < arg._value: return True return False ### PRIVATE PROPERTIES ### @property def _formatted_value(self): numerator, denominator = self._value.numerator, self._value.denominator return '(ly:make-moment {} {})'.format(numerator, denominator) @property def _storage_format_specification(self): from abjad.tools import systemtools return systemtools.StorageFormatSpecification( self, positional_argument_values=( self._value.numerator, self._value.denominator, ), ) ### PUBLIC PROPERTIES ### @property def duration(self): r'''Duration of scheme moment. :: >>> scheme_moment = schemetools.SchemeMoment(1, 68) >>> scheme_moment.duration Duration(1, 68) Returns duration. ''' return self._value
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/schemetools/SchemeMoment.py
# -*- encoding: utf-8 -*- from abjad import * def configure_lilypond_file(lilypond_file): r'''Configures LilyPond file. ''' lilypond_file.global_staff_size = 8 context_block = lilypondfiletools.ContextBlock( source_context_name=r'Staff \RemoveEmptyStaves', ) override(context_block).vertical_axis_group.remove_first = True lilypond_file.layout_block.items.append(context_block) slash_separator = indicatortools.LilyPondCommand('slashSeparator') lilypond_file.paper_block.system_separator_markup = slash_separator bottom_margin = lilypondfiletools.LilyPondDimension(0.5, 'in') lilypond_file.paper_block.bottom_margin = bottom_margin top_margin = lilypondfiletools.LilyPondDimension(0.5, 'in') lilypond_file.paper_block.top_margin = top_margin left_margin = lilypondfiletools.LilyPondDimension(0.75, 'in') lilypond_file.paper_block.left_margin = left_margin right_margin = lilypondfiletools.LilyPondDimension(0.5, 'in') lilypond_file.paper_block.right_margin = right_margin paper_width = lilypondfiletools.LilyPondDimension(5.25, 'in') lilypond_file.paper_block.paper_width = paper_width paper_height = lilypondfiletools.LilyPondDimension(7.25, 'in') lilypond_file.paper_block.paper_height = paper_height lilypond_file.header_block.composer = markuptools.Markup('Arvo Pärt') title = 'Cantus in Memory of Benjamin Britten (1980)' lilypond_file.header_block.title = markuptools.Markup(title)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/demos/part/configure_lilypond_file.py
# -*- encoding: utf-8 -*- from abjad import * def test_selectiontools_Selection__get_component_01(): staff = Staff("abj: | 2/8 c'8 d'8 || 2/8 e'8 f'8 || 2/8 g'8 a'8 |") assert select(staff)._get_component(Measure, 0) is staff[0] assert select(staff)._get_component(Measure, 1) is staff[1] assert select(staff)._get_component(Measure, 2) is staff[2] def test_selectiontools_Selection__get_component_02(): staff = Staff("abj: | 2/8 c'8 d'8 || 2/8 e'8 f'8 || 2/8 g'8 a'8 |") assert select(staff)._get_component(Measure, -1) is staff[2] assert select(staff)._get_component(Measure, -2) is staff[1] assert select(staff)._get_component(Measure, -3) is staff[0] def test_selectiontools_Selection__get_component_03(): r'''Read forwards for positive n. ''' staff = Staff("abj: | 2/8 c'8 d'8 || 2/8 e'8 f'8 || 2/8 g'8 a'8 |") r''' \new Staff { { \time 2/8 c'8 d'8 } { \time 2/8 e'8 f'8 } { \time 2/8 g'8 a'8 } } ''' assert select(staff)._get_component(scoretools.Leaf, 0) is staff[0][0] assert select(staff)._get_component(scoretools.Leaf, 1) is staff[0][1] assert select(staff)._get_component(scoretools.Leaf, 2) is staff[1][0] assert select(staff)._get_component(scoretools.Leaf, 3) is staff[1][1] assert select(staff)._get_component(scoretools.Leaf, 4) is staff[2][0] assert select(staff)._get_component(scoretools.Leaf, 5) is staff[2][1] def test_selectiontools_Selection__get_component_04(): r'''Read backwards for negative n. ''' staff = Staff("abj: | 2/8 c'8 d'8 || 2/8 e'8 f'8 || 2/8 g'8 a'8 |") r''' \new Staff { { \time 2/8 c'8 d'8 } { \time 2/8 e'8 f'8 } { \time 2/8 g'8 a'8 } } ''' assert select(staff)._get_component(scoretools.Leaf, -1) is staff[2][1] assert select(staff)._get_component(scoretools.Leaf, -2) is staff[2][0] assert select(staff)._get_component(scoretools.Leaf, -3) is staff[1][1] assert select(staff)._get_component(scoretools.Leaf, -4) is staff[1][0] assert select(staff)._get_component(scoretools.Leaf, -5) is staff[0][1] assert select(staff)._get_component(scoretools.Leaf, -6) is staff[0][0] def test_selectiontools_Selection__get_component_05(): staff = Staff(r''' c'16 r16 d'8 r8 e'8. r8. f'4 r4 ''') notes = [staff[0], staff[2], staff[4], staff[6]] rests = [staff[1], staff[3], staff[5], staff[7]] assert select(staff)._get_component(Note, 0) is notes[0] assert select(staff)._get_component(Note, 1) is notes[1] assert select(staff)._get_component(Note, 2) is notes[2] assert select(staff)._get_component(Note, 3) is notes[3] assert select(staff)._get_component(Rest, 0) is rests[0] assert select(staff)._get_component(Rest, 1) is rests[1] assert select(staff)._get_component(Rest, 2) is rests[2] assert select(staff)._get_component(Rest, 3) is rests[3] assert select(staff)._get_component(Staff, 0) is staff def test_selectiontools_Selection__get_component_06(): r'''Iterates backwards with negative values of n. ''' staff = Staff(r''' c'16 r16 d'8 r8 e'8. r8. f'4 r4 ''') notes = [staff[0], staff[2], staff[4], staff[6]] rests = [staff[1], staff[3], staff[5], staff[7]] assert select(staff)._get_component(Note, -1) is notes[3] assert select(staff)._get_component(Note, -2) is notes[2] assert select(staff)._get_component(Note, -3) is notes[1] assert select(staff)._get_component(Note, -4) is notes[0] assert select(staff)._get_component(Rest, -1) is rests[3] assert select(staff)._get_component(Rest, -2) is rests[2] assert select(staff)._get_component(Rest, -3) is rests[1] assert select(staff)._get_component(Rest, -4) is rests[0]
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/selectiontools/test/test_selectiontools_Selection__get_component.py
# -*- encoding: utf-8 -*- import sys from abjad import * def test_stringtools_strip_diacritics_01(): if sys.version_info[0] == 2: binary_string = 'Dvo\xc5\x99\xc3\xa1k' else: binary_string = 'Dvořák' ascii_string = stringtools.strip_diacritics(binary_string) assert ascii_string == 'Dvorak'
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/stringtools/test/test_stringtools_strip_diacritics.py
# -*- encoding: utf-8 -*- from abjad.tools import durationtools from abjad.tools import mathtools from abjad.tools import sequencetools from abjad.tools.pitchtools.Segment import Segment from abjad.tools.topleveltools import new class IntervalSegment(Segment): r'''An interval segment. :: >>> intervals = 'm2 M10 -aug4 P5' >>> pitchtools.IntervalSegment(intervals) IntervalSegment(['+m2', '+M10', '-aug4', '+P5']) :: >>> pitch_segment = pitchtools.PitchSegment("c d e f g a b c'") >>> pitchtools.IntervalSegment(pitch_segment) IntervalSegment(['+M2', '+M2', '+m2', '+M2', '+M2', '+M2', '+m2']) ''' ### CLASS VARIABLES ### __slots__ = () ### INITIALIZER ### def __init__( self, items=None, item_class=None, ): from abjad.tools import pitchtools if isinstance(items, pitchtools.PitchSegment): intervals = [] for one, two in sequencetools.iterate_sequence_nwise(items): intervals.append(one - two) items = intervals Segment.__init__( self, items=items, item_class=item_class, ) ### PRIVATE PROPERTIES ### @property def _named_item_class(self): from abjad.tools import pitchtools return pitchtools.NamedInterval @property def _numbered_item_class(self): from abjad.tools import pitchtools return pitchtools.NumberedInterval @property def _parent_item_class(self): from abjad.tools import pitchtools return pitchtools.Interval @property def _repr_specification(self): items = [] if self.item_class.__name__.startswith('Named'): items = [str(x) for x in self] else: items = [x.number for x in self] return new( self._storage_format_specification, is_indented=False, keyword_argument_names=(), positional_argument_values=( items, ), ) ### PUBLIC METHODS ### @classmethod def from_selection( cls, selection, item_class=None, ): r'''Makes interval segment from component `selection`. :: >>> staff = Staff("c'8 d'8 e'8 f'8 g'8 a'8 b'8 c''8") >>> pitchtools.IntervalSegment.from_selection( ... staff, item_class=pitchtools.NumberedInterval) IntervalSegment([2, 2, 1, 2, 2, 2, 1]) Returns interval segment. ''' from abjad.tools import pitchtools pitch_segment = pitchtools.PitchSegment.from_selection(selection) intervals = (-x for x in mathtools.difference_series(pitch_segment)) return cls( items=intervals, item_class=item_class, ) def rotate(self, n): r'''Rotates interval segment by `n`. Returns new interval segment. ''' return new(self, self[-n:] + self[:-n]) ### PUBLIC PROPERTIES ### @property def has_duplicates(self): r'''True if segment has duplicate items. Otherwise false. :: >>> intervals = 'm2 M3 -aug4 m2 P5' >>> segment = pitchtools.IntervalSegment(intervals) >>> segment.has_duplicates True :: >>> intervals = 'M3 -aug4 m2 P5' >>> segment = pitchtools.IntervalSegment(intervals) >>> segment.has_duplicates False Returns boolean. ''' from abjad.tools import pitchtools return len(pitchtools.IntervalSet(self)) < len(self) @property def slope(self): r'''Slope of interval segment. The slope of a interval segment is the sum of its intervals divided by its length: :: >>> pitchtools.IntervalSegment([1, 2]).slope Multiplier(3, 2) Returns multiplier. ''' return durationtools.Multiplier.from_float( sum([x.number for x in self])) / len(self) @property def spread(self): r'''Spread of interval segment. The maximum interval spanned by any combination of the intervals within a numbered interval segment. :: >>> pitchtools.IntervalSegment([1, 2, -3, 1, -2, 1]).spread NumberedInterval(4.0) :: >>> pitchtools.IntervalSegment([1, 1, 1, 2, -3, -2]).spread NumberedInterval(5.0) Returns numbered interval. 
''' from abjad.tools import pitchtools current = maximum = minimum = 0 for x in self: current += float(x) if maximum < current: maximum = current if current < minimum: minimum = current return pitchtools.NumberedInterval(maximum - minimum)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/IntervalSegment.py
# -*- encoding: utf-8 -*- from abjad import * def test_pitchtools_yield_all_pitch_class_sets_01(): U_star = pitchtools.yield_all_pitch_class_sets() assert len(U_star) == 4096 assert pitchtools.PitchClassSet([0, 1, 2]) in U_star assert pitchtools.PitchClassSet([1, 2, 3]) in U_star assert pitchtools.PitchClassSet([3, 4, 8, 9, 11]) in U_star assert pitchtools.PitchClassSet(range(12)) in U_star
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_yield_all_pitch_class_sets.py
# -*- encoding: utf-8 -*- from abjad import * def test_pitchtools_NumberedPitchClass___add___01(): r'''Ascending numbered interval added to pitch-class. ''' pc = pitchtools.NumberedPitchClass(0) MCI = pitchtools.NumberedInterval assert pc + MCI(1) == pitchtools.NumberedPitchClass(1) assert pc + MCI(2) == pitchtools.NumberedPitchClass(2) assert pc + MCI(3) == pitchtools.NumberedPitchClass(3) assert pc + MCI(4) == pitchtools.NumberedPitchClass(4) assert pc + MCI(5) == pitchtools.NumberedPitchClass(5) assert pc + MCI(6) == pitchtools.NumberedPitchClass(6) assert pc + MCI(7) == pitchtools.NumberedPitchClass(7) assert pc + MCI(8) == pitchtools.NumberedPitchClass(8) assert pc + MCI(9) == pitchtools.NumberedPitchClass(9) assert pc + MCI(10) == pitchtools.NumberedPitchClass(10) assert pc + MCI(11) == pitchtools.NumberedPitchClass(11) def test_pitchtools_NumberedPitchClass___add___02(): r'''Ascending numbered interval added to pitch-class. ''' pc = pitchtools.NumberedPitchClass(0) MCI = pitchtools.NumberedInterval assert pc + MCI(12) == pitchtools.NumberedPitchClass(0) assert pc + MCI(13) == pitchtools.NumberedPitchClass(1) assert pc + MCI(14) == pitchtools.NumberedPitchClass(2) assert pc + MCI(15) == pitchtools.NumberedPitchClass(3) assert pc + MCI(16) == pitchtools.NumberedPitchClass(4) assert pc + MCI(17) == pitchtools.NumberedPitchClass(5) assert pc + MCI(18) == pitchtools.NumberedPitchClass(6) assert pc + MCI(19) == pitchtools.NumberedPitchClass(7) assert pc + MCI(20) == pitchtools.NumberedPitchClass(8) assert pc + MCI(21) == pitchtools.NumberedPitchClass(9) assert pc + MCI(22) == pitchtools.NumberedPitchClass(10) assert pc + MCI(23) == pitchtools.NumberedPitchClass(11) def test_pitchtools_NumberedPitchClass___add___03(): r'''Descending numbered interval added to pitch-class. ''' pc = pitchtools.NumberedPitchClass(0) MCI = pitchtools.NumberedInterval assert pc + MCI(-1) == pitchtools.NumberedPitchClass(11) assert pc + MCI(-2) == pitchtools.NumberedPitchClass(10) assert pc + MCI(-3) == pitchtools.NumberedPitchClass(9) assert pc + MCI(-4) == pitchtools.NumberedPitchClass(8) assert pc + MCI(-5) == pitchtools.NumberedPitchClass(7) assert pc + MCI(-6) == pitchtools.NumberedPitchClass(6) assert pc + MCI(-7) == pitchtools.NumberedPitchClass(5) assert pc + MCI(-8) == pitchtools.NumberedPitchClass(4) assert pc + MCI(-9) == pitchtools.NumberedPitchClass(3) assert pc + MCI(-10) == pitchtools.NumberedPitchClass(2) assert pc + MCI(-11) == pitchtools.NumberedPitchClass(1) def test_pitchtools_NumberedPitchClass___add___04(): r'''Descending numbered interval added to pitch-class. ''' pc = pitchtools.NumberedPitchClass(0) MCI = pitchtools.NumberedInterval assert pc + MCI(-12) == pitchtools.NumberedPitchClass(0) assert pc + MCI(-13) == pitchtools.NumberedPitchClass(11) assert pc + MCI(-14) == pitchtools.NumberedPitchClass(10) assert pc + MCI(-15) == pitchtools.NumberedPitchClass(9) assert pc + MCI(-16) == pitchtools.NumberedPitchClass(8) assert pc + MCI(-17) == pitchtools.NumberedPitchClass(7) assert pc + MCI(-18) == pitchtools.NumberedPitchClass(6) assert pc + MCI(-19) == pitchtools.NumberedPitchClass(5) assert pc + MCI(-20) == pitchtools.NumberedPitchClass(4) assert pc + MCI(-21) == pitchtools.NumberedPitchClass(3) assert pc + MCI(-22) == pitchtools.NumberedPitchClass(2) assert pc + MCI(-23) == pitchtools.NumberedPitchClass(1) def test_pitchtools_NumberedPitchClass___add___05(): r'''numbered unison added to pitch-class. 
''' pc = pitchtools.NumberedPitchClass(0) MCI = pitchtools.NumberedInterval assert pc + MCI(0) == pitchtools.NumberedPitchClass(0)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_NumberedPitchClass___add__.py
# -*- encoding: utf-8 -*- from abjad import * def test_schemetools_Scheme_format_scheme_value_01(): assert schemetools.Scheme.format_scheme_value(1) == '1' assert schemetools.Scheme.format_scheme_value(True) == '#t' assert schemetools.Scheme.format_scheme_value(False) == '#f' assert schemetools.Scheme.format_scheme_value('foo bar') == '"foo bar"' assert schemetools.Scheme.format_scheme_value('baz') == 'baz' assert schemetools.Scheme.format_scheme_value([1, 2, 3]) == '(1 2 3)'
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/schemetools/test/test_schemetools_Scheme_format_scheme_value.py
# -*- encoding: utf-8 -*- import abc from abjad.tools.abctools.AbjadObject import AbjadObject class TypedCollection(AbjadObject): r'''Abstract base class for typed collections. ''' ### CLASS VARIABLES ### __slots__ = ( '_collection', '_item_class', ) ### INITIALIZER ### @abc.abstractmethod def __init__(self, items=None, item_class=None): assert isinstance(item_class, (type(None), type)) self._item_class = item_class ### SPECIAL METHODS ### def __contains__(self, item): r'''Is true when typed collection container `item`. Otherwise false. Returns boolean. ''' try: item = self._item_coercer(item) except ValueError: return False return self._collection.__contains__(item) def __eq__(self, expr): r'''Is true when `expr` is a typed collection with items that compare equal to those of this typed collection. Otherwise false. Returns boolean. ''' if isinstance(expr, type(self)): return self._collection == expr._collection elif isinstance(expr, type(self._collection)): return self._collection == expr return False def __format__(self, format_specification=''): r'''Formats typed collection. Set `format_specification` to `''` or `'storage'`. Interprets `''` equal to `'storage'`. Returns string. ''' from abjad.tools import systemtools if format_specification in ('', 'storage'): return systemtools.StorageFormatManager.get_storage_format(self) return str(self) def __getnewargs__(self): r'''Gets new arguments. Returns tuple. ''' return (self._collection, self.item_class) def __hash__(self): r'''Hashes typed collection. Required to be explicitly re-defined on Python 3 if __eq__ changes. Returns integer. ''' return super(TypedCollection, self).__hash__() def __iter__(self): r'''Iterates typed collection. Returns generator. ''' return self._collection.__iter__() def __len__(self): r'''Length of typed collection. Returns nonnegative integer. ''' return len(self._collection) def __ne__(self, expr): r'''Is true when `expr` is not a typed collection with items equal to this typed collection. Otherwise false. Returns boolean. ''' return not self.__eq__(expr) ### PRIVATE METHODS ### def _on_insertion(self, item): r'''Override to operate on item after insertion into collection. ''' pass def _on_removal(self, item): r'''Override to operate on item after removal from collection. 
''' pass ### PRIVATE PROPERTIES ### @property def _item_coercer(self): def coerce_(x): if isinstance(x, self._item_class): return x return self._item_class(x) if self._item_class is None: return lambda x: x return coerce_ @property def _repr_specification(self): from abjad.tools import systemtools manager = systemtools.StorageFormatManager names = manager.get_signature_keyword_argument_names(self) keyword_argument_names = list(names) if 'items' in keyword_argument_names: keyword_argument_names.remove('items') keyword_argument_names = tuple(keyword_argument_names) positional_argument_values = ( self._collection, ) return systemtools.StorageFormatSpecification( self, is_indented=False, keyword_argument_names=keyword_argument_names, positional_argument_values=positional_argument_values, ) @property def _storage_format_specification(self): from abjad.tools import systemtools manager = systemtools.StorageFormatManager names = manager.get_signature_keyword_argument_names(self) keyword_argument_names = list(names) if 'items' in keyword_argument_names: keyword_argument_names.remove('items') keyword_argument_names = tuple(keyword_argument_names) positional_argument_values = ( self._collection, ) return systemtools.StorageFormatSpecification( self, keyword_argument_names=keyword_argument_names, positional_argument_values=positional_argument_values, ) ### PUBLIC PROPERTIES ### @property def item_class(self): r'''Item class to coerce items into. ''' return self._item_class @property def items(self): r'''Gets collection items. ''' return [x for x in self]
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/datastructuretools/TypedCollection.py
# -*- encoding: utf-8 -*- def timespan_2_overlaps_all_of_timespan_1( timespan_1=None, timespan_2=None, hold=False, ): r'''Makes time relation indicating that `timespan_2` overlaps all of `timespan_1`. :: >>> relation = timespantools.timespan_2_overlaps_all_of_timespan_1() >>> print(format(relation)) timespantools.TimespanTimespanTimeRelation( inequality=timespantools.CompoundInequality( [ timespantools.SimpleInequality('timespan_2.start_offset < timespan_1.start_offset'), timespantools.SimpleInequality('timespan_1.stop_offset < timespan_2.stop_offset'), ], logical_operator='and', ), ) Returns time relation or boolean. ''' from abjad.tools import timespantools inequality = timespantools.CompoundInequality([ 'timespan_2.start_offset < timespan_1.start_offset', 'timespan_1.stop_offset < timespan_2.stop_offset', ]) time_relation = timespantools.TimespanTimespanTimeRelation( inequality, timespan_1=timespan_1, timespan_2=timespan_2, ) if time_relation.is_fully_loaded and not hold: return time_relation() else: return time_relation
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/timespantools/timespan_2_overlaps_all_of_timespan_1.py
# -*- encoding: utf-8 -*- from abjad import * def test_pitchtools_PitchClassSet_multiply_01(): assert pitchtools.PitchClassSet([0, 1, 5]).multiply(5) == \ pitchtools.PitchClassSet([0, 1, 5]) assert pitchtools.PitchClassSet([1, 2, 6]).multiply(5) == \ pitchtools.PitchClassSet([5, 6, 10]) assert pitchtools.PitchClassSet([2, 3, 7]).multiply(5) == \ pitchtools.PitchClassSet([3, 10, 11])
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_PitchClassSet_multiply.py
# -*- encoding: utf-8 -*- from abjad import * def test_scoretools_Container_index_01(): r'''Elements that compare equal return different indices in container. ''' container = Container(4 * Note("c'4")) assert container.index(container[0]) == 0 assert container.index(container[1]) == 1 assert container.index(container[2]) == 2 assert container.index(container[3]) == 3
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/scoretools/test/test_scoretools_Container_index.py
# -*- encoding: utf-8 -*- from abjad import * def test_pitchtools_list_pitch_numbers_in_expr_01(): tuplet = scoretools.FixedDurationTuplet(Duration(2, 8), "c'8 d'8 e'8") assert pitchtools.list_pitch_numbers_in_expr(tuplet) == (0, 2, 4) def test_pitchtools_list_pitch_numbers_in_expr_02(): staff = Staff("c'8 d'8 e'8 f'8") assert pitchtools.list_pitch_numbers_in_expr(staff) == (0, 2, 4, 5)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_list_pitch_numbers_in_expr.py
# -*- encoding: utf-8 -*- from abjad import * def test_rhythmtreetools_RhythmTreeContainer_insert_01(): leaf_a = rhythmtreetools.RhythmTreeLeaf(preprolated_duration=3) leaf_b = rhythmtreetools.RhythmTreeLeaf(preprolated_duration=3) leaf_c = rhythmtreetools.RhythmTreeLeaf(preprolated_duration=2) container = rhythmtreetools.RhythmTreeContainer() assert container.children == () container.insert(0, leaf_a) assert container.children == (leaf_a,) container.insert(0, leaf_b) assert container.children == (leaf_b, leaf_a) container.insert(1, leaf_c) assert container.children == (leaf_b, leaf_c, leaf_a)
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/rhythmtreetools/test/test_rhythmtreetools_RhythmTreeContainer_insert.py
# -*- encoding: utf-8 -*- import six from abjad.tools.stringtools.strip_diacritics import strip_diacritics def to_accent_free_snake_case(string): '''Changes `string` to accent-free snake case. .. container:: example :: >>> stringtools.to_accent_free_snake_case('Déja vu') 'deja_vu' Strips accents from accented characters. Changes all punctuation (including spaces) to underscore. Sets to lowercase. Returns string. ''' assert isinstance(string, six.string_types) result = strip_diacritics(string) result = result.replace(' ', '_') result = result.replace("'", '_') result = result.lower() return result
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/stringtools/to_accent_free_snake_case.py
# -*- encoding: utf-8 -*- import copy from abjad import * def test_pitchtools_NamedPitch___copy___01(): pitch = NamedPitch(13) new = copy.copy(pitch) assert new is not pitch assert new.accidental is not pitch.accidental
# -*- encoding: utf-8 -*- import pytest from abjad import * def test_pitchtools_PitchArrayCell_previous_01(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert array[0][1].previous is array[0][0] def test_pitchtools_PitchArrayCell_previous_02(): array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]]) ''' [] [ ] [] [ ] [] [] ''' assert pytest.raises(IndexError, 'array[0][0].previous') def test_pitchtools_PitchArrayCell_previous_03(): cell = pitchtools.PitchArrayCell([NamedPitch(1)]) assert pytest.raises(IndexError, 'cell.previous')
mscuthbert/abjad
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_previous.py
abjad/tools/pitchtools/test/test_pitchtools_NamedPitch___copy__.py
"""Support for Volvo On Call.""" from datetime import timedelta import logging import voluptuous as vol from volvooncall import Connection from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_REGION, CONF_RESOURCES, CONF_SCAN_INTERVAL, CONF_USERNAME, ) from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util.dt import utcnow DOMAIN = "volvooncall" DATA_KEY = DOMAIN _LOGGER = logging.getLogger(__name__) MIN_UPDATE_INTERVAL = timedelta(minutes=1) DEFAULT_UPDATE_INTERVAL = timedelta(minutes=1) CONF_SERVICE_URL = "service_url" CONF_SCANDINAVIAN_MILES = "scandinavian_miles" CONF_MUTABLE = "mutable" SIGNAL_STATE_UPDATED = f"{DOMAIN}.updated" PLATFORMS = { "sensor": "sensor", "binary_sensor": "binary_sensor", "lock": "lock", "device_tracker": "device_tracker", "switch": "switch", } RESOURCES = [ "position", "lock", "heater", "odometer", "trip_meter1", "trip_meter2", "average_speed", "fuel_amount", "fuel_amount_level", "average_fuel_consumption", "distance_to_empty", "washer_fluid_level", "brake_fluid", "service_warning_status", "bulb_failures", "battery_range", "battery_level", "time_to_fully_charged", "battery_charge_status", "engine_start", "last_trip", "is_engine_running", "doors_hood_open", "doors_tailgate_open", "doors_front_left_door_open", "doors_front_right_door_open", "doors_rear_left_door_open", "doors_rear_right_door_open", "windows_front_left_window_open", "windows_front_right_window_open", "windows_rear_left_window_open", "windows_rear_right_window_open", "tyre_pressure_front_left_tyre_pressure", "tyre_pressure_front_right_tyre_pressure", "tyre_pressure_rear_left_tyre_pressure", "tyre_pressure_rear_right_tyre_pressure", "any_door_open", "any_window_open", ] CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_UPDATE_INTERVAL ): vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL)), vol.Optional(CONF_NAME, default={}): cv.schema_with_slug_keys( cv.string ), vol.Optional(CONF_RESOURCES): vol.All( cv.ensure_list, [vol.In(RESOURCES)] ), vol.Optional(CONF_REGION): cv.string, vol.Optional(CONF_SERVICE_URL): cv.string, vol.Optional(CONF_MUTABLE, default=True): cv.boolean, vol.Optional(CONF_SCANDINAVIAN_MILES, default=False): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Volvo On Call component.""" session = async_get_clientsession(hass) connection = Connection( session=session, username=config[DOMAIN].get(CONF_USERNAME), password=config[DOMAIN].get(CONF_PASSWORD), service_url=config[DOMAIN].get(CONF_SERVICE_URL), region=config[DOMAIN].get(CONF_REGION), ) interval = config[DOMAIN][CONF_SCAN_INTERVAL] data = hass.data[DATA_KEY] = VolvoData(config) def is_enabled(attr): """Return true if the user has enabled the resource.""" return attr in config[DOMAIN].get(CONF_RESOURCES, [attr]) def discover_vehicle(vehicle): """Load relevant platforms.""" data.vehicles.add(vehicle.vin) dashboard = vehicle.dashboard( mutable=config[DOMAIN][CONF_MUTABLE], scandinavian_miles=config[DOMAIN][CONF_SCANDINAVIAN_MILES], ) for instrument in ( instrument for instrument in 
dashboard.instruments if instrument.component in PLATFORMS and is_enabled(instrument.slug_attr) ): data.instruments.add(instrument) hass.async_create_task( discovery.async_load_platform( hass, PLATFORMS[instrument.component], DOMAIN, (vehicle.vin, instrument.component, instrument.attr), config, ) ) async def update(now): """Update status from the online service.""" try: if not await connection.update(journal=True): _LOGGER.warning("Could not query server") return False for vehicle in connection.vehicles: if vehicle.vin not in data.vehicles: discover_vehicle(vehicle) async_dispatcher_send(hass, SIGNAL_STATE_UPDATED) return True finally: async_track_point_in_utc_time(hass, update, utcnow() + interval) _LOGGER.info("Logging in to service") return await update(utcnow()) class VolvoData: """Hold component state.""" def __init__(self, config): """Initialize the component state.""" self.vehicles = set() self.instruments = set() self.config = config[DOMAIN] self.names = self.config.get(CONF_NAME) def instrument(self, vin, component, attr): """Return corresponding instrument.""" return next( ( instrument for instrument in self.instruments if instrument.vehicle.vin == vin and instrument.component == component and instrument.attr == attr ), None, ) def vehicle_name(self, vehicle): """Provide a friendly name for a vehicle.""" if ( vehicle.registration_number and vehicle.registration_number.lower() ) in self.names: return self.names[vehicle.registration_number.lower()] if vehicle.vin and vehicle.vin.lower() in self.names: return self.names[vehicle.vin.lower()] if vehicle.registration_number: return vehicle.registration_number if vehicle.vin: return vehicle.vin return "" class VolvoEntity(Entity): """Base class for all VOC entities.""" def __init__(self, data, vin, component, attribute): """Initialize the entity.""" self.data = data self.vin = vin self.component = component self.attribute = attribute async def async_added_to_hass(self): """Register update dispatcher.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_STATE_UPDATED, self.async_write_ha_state ) ) @property def instrument(self): """Return corresponding instrument.""" return self.data.instrument(self.vin, self.component, self.attribute) @property def icon(self): """Return the icon.""" return self.instrument.icon @property def vehicle(self): """Return vehicle.""" return self.instrument.vehicle @property def _entity_name(self): return self.instrument.name @property def _vehicle_name(self): return self.data.vehicle_name(self.vehicle) @property def name(self): """Return full name of the entity.""" return f"{self._vehicle_name} {self._entity_name}" @property def should_poll(self): """Return the polling state.""" return False @property def assumed_state(self): """Return true if unable to access real state of entity.""" return True @property def extra_state_attributes(self): """Return device specific state attributes.""" return dict( self.instrument.attributes, model=f"{self.vehicle.vehicle_type}/{self.vehicle.model_year}", ) @property def unique_id(self) -> str: """Return a unique ID.""" return f"{self.vin}-{self.component}-{self.attribute}"
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/volvooncall/__init__.py
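Editor's note on the row above: the volvooncall update() coroutine reschedules itself from a finally block, so polling continues even when the cloud query fails. The standalone sketch below illustrates only that self-rescheduling pattern with plain asyncio instead of Home Assistant's async_track_point_in_utc_time; all names here (poll, fake_fetch, INTERVAL) are hypothetical and not part of the dataset row.

# Minimal sketch of a self-rescheduling poll loop, assuming a fake fetch function.
import asyncio

INTERVAL = 1.0  # seconds; the real integration clamps its interval to MIN_UPDATE_INTERVAL

async def fake_fetch() -> bool:
    """Stand-in for connection.update(journal=True)."""
    return True

async def poll(loop: asyncio.AbstractEventLoop) -> bool:
    try:
        if not await fake_fetch():
            print("Could not query server")
            return False
        print("State updated, notifying listeners")
        return True
    finally:
        # Reschedule unconditionally, mirroring the finally block in update().
        loop.call_later(INTERVAL, lambda: asyncio.ensure_future(poll(loop)))

async def main() -> None:
    await poll(asyncio.get_running_loop())
    await asyncio.sleep(2.5)  # let a couple of rescheduled polls run

if __name__ == "__main__":
    asyncio.run(main())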
"""Support for Verisure Smartplugs.""" from __future__ import annotations from time import monotonic from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_GIID, DOMAIN from .coordinator import VerisureDataUpdateCoordinator async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Verisure alarm control panel from a config entry.""" coordinator: VerisureDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( VerisureSmartplug(coordinator, serial_number) for serial_number in coordinator.data["smart_plugs"] ) class VerisureSmartplug(CoordinatorEntity, SwitchEntity): """Representation of a Verisure smartplug.""" coordinator: VerisureDataUpdateCoordinator def __init__( self, coordinator: VerisureDataUpdateCoordinator, serial_number: str ) -> None: """Initialize the Verisure device.""" super().__init__(coordinator) self._attr_name = coordinator.data["smart_plugs"][serial_number]["area"] self._attr_unique_id = serial_number self.serial_number = serial_number self._change_timestamp = 0 self._state = False @property def device_info(self) -> DeviceInfo: """Return device information about this entity.""" area = self.coordinator.data["smart_plugs"][self.serial_number]["area"] return { "name": area, "suggested_area": area, "manufacturer": "Verisure", "model": "SmartPlug", "identifiers": {(DOMAIN, self.serial_number)}, "via_device": (DOMAIN, self.coordinator.entry.data[CONF_GIID]), } @property def is_on(self) -> bool: """Return true if on.""" if monotonic() - self._change_timestamp < 10: return self._state self._state = ( self.coordinator.data["smart_plugs"][self.serial_number]["currentState"] == "ON" ) return self._state @property def available(self) -> bool: """Return True if entity is available.""" return ( super().available and self.serial_number in self.coordinator.data["smart_plugs"] ) def turn_on(self, **kwargs) -> None: """Set smartplug status on.""" self.coordinator.verisure.set_smartplug_state(self.serial_number, True) self._state = True self._change_timestamp = monotonic() self.schedule_update_ha_state() def turn_off(self, **kwargs) -> None: """Set smartplug status off.""" self.coordinator.verisure.set_smartplug_state(self.serial_number, False) self._state = False self._change_timestamp = monotonic() self.schedule_update_ha_state()
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/verisure/switch.py
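Editor's note on the row above: VerisureSmartplug.is_on trusts a locally commanded state for roughly ten seconds (tracked with a monotonic timestamp) before falling back to coordinator data, which avoids the switch flickering back to a stale remote state. The sketch below is a hypothetical, self-contained reduction of that optimistic-state window; it is not Home Assistant code.

# Minimal sketch of the optimistic-state grace window, assuming a callable that
# reads the remote state.
from time import monotonic

class OptimisticSwitch:
    GRACE = 10  # seconds during which the commanded state is trusted

    def __init__(self, read_remote_state):
        self._read_remote_state = read_remote_state  # callable returning bool
        self._state = False
        self._changed_at = 0.0

    def turn_on(self) -> None:
        self._state = True
        self._changed_at = monotonic()

    @property
    def is_on(self) -> bool:
        if monotonic() - self._changed_at < self.GRACE:
            return self._state                        # trust the recent local command
        self._state = self._read_remote_state()       # otherwise fall back to remote data
        return self._state

if __name__ == "__main__":
    plug = OptimisticSwitch(read_remote_state=lambda: False)
    plug.turn_on()
    print(plug.is_on)  # True: still inside the grace window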
"""Support for WaterHeater devices of (EMEA/EU) Honeywell TCC systems.""" from __future__ import annotations import logging from homeassistant.components.water_heater import ( SUPPORT_AWAY_MODE, SUPPORT_OPERATION_MODE, WaterHeaterEntity, ) from homeassistant.const import PRECISION_TENTHS, PRECISION_WHOLE, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util from . import EvoChild from .const import DOMAIN, EVO_FOLLOW, EVO_PERMOVER _LOGGER = logging.getLogger(__name__) STATE_AUTO = "auto" HA_STATE_TO_EVO = {STATE_AUTO: "", STATE_ON: "On", STATE_OFF: "Off"} EVO_STATE_TO_HA = {v: k for k, v in HA_STATE_TO_EVO.items() if k != ""} STATE_ATTRS_DHW = ["dhwId", "activeFaults", "stateStatus", "temperatureStatus"] async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Create a DHW controller.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] _LOGGER.debug( "Adding: DhwController (%s), id=%s", broker.tcs.hotwater.zone_type, broker.tcs.hotwater.zoneId, ) new_entity = EvoDHW(broker, broker.tcs.hotwater) async_add_entities([new_entity], update_before_add=True) class EvoDHW(EvoChild, WaterHeaterEntity): """Base for a Honeywell TCC DHW controller (aka boiler).""" def __init__(self, evo_broker, evo_device) -> None: """Initialize an evohome DHW controller.""" super().__init__(evo_broker, evo_device) self._unique_id = evo_device.dhwId self._name = "DHW controller" self._icon = "mdi:thermometer-lines" self._precision = PRECISION_TENTHS if evo_broker.client_v1 else PRECISION_WHOLE self._supported_features = SUPPORT_AWAY_MODE | SUPPORT_OPERATION_MODE @property def state(self): """Return the current state.""" return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] @property def current_operation(self) -> str: """Return the current operating mode (Auto, On, or Off).""" if self._evo_device.stateStatus["mode"] == EVO_FOLLOW: return STATE_AUTO return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] @property def operation_list(self) -> list[str]: """Return the list of available operations.""" return list(HA_STATE_TO_EVO) @property def is_away_mode_on(self): """Return True if away mode is on.""" is_off = EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] == STATE_OFF is_permanent = self._evo_device.stateStatus["mode"] == EVO_PERMOVER return is_off and is_permanent async def async_set_operation_mode(self, operation_mode: str) -> None: """Set new operation mode for a DHW controller. Except for Auto, the mode is only until the next SetPoint. 
""" if operation_mode == STATE_AUTO: await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto()) else: await self._update_schedule() until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", "")) until = dt_util.as_utc(until) if until else None if operation_mode == STATE_ON: await self._evo_broker.call_client_api( self._evo_device.set_dhw_on(until=until) ) else: # STATE_OFF await self._evo_broker.call_client_api( self._evo_device.set_dhw_off(until=until) ) async def async_turn_away_mode_on(self): """Turn away mode on.""" await self._evo_broker.call_client_api(self._evo_device.set_dhw_off()) async def async_turn_away_mode_off(self): """Turn away mode off.""" await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto()) async def async_turn_on(self): """Turn on.""" await self._evo_broker.call_client_api(self._evo_device.set_dhw_on()) async def async_turn_off(self): """Turn off.""" await self._evo_broker.call_client_api(self._evo_device.set_dhw_off()) async def async_update(self) -> None: """Get the latest state data for a DHW controller.""" await super().async_update() for attr in STATE_ATTRS_DHW: self._device_state_attrs[attr] = getattr(self._evo_device, attr)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/evohome/water_heater.py
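Editor's note on the row above: EvoDHW reports away mode only when the DHW controller is both off and under a permanent override rather than following its schedule. The sketch below restates that check on a bare dictionary; the stateStatus payload shape and the EVO_PERMOVER value are assumptions based on the component's usage, not a definitive API description.

# Minimal sketch of the away-mode test over an assumed stateStatus dict.
EVO_PERMOVER = "PermanentOverride"  # value assumed from the component's .const module

def is_away_mode_on(state_status: dict) -> bool:
    """Away means off AND permanently overridden."""
    is_off = state_status["state"] == "Off"
    is_permanent = state_status["mode"] == EVO_PERMOVER
    return is_off and is_permanent

if __name__ == "__main__":
    print(is_away_mode_on({"state": "Off", "mode": EVO_PERMOVER}))         # True
    print(is_away_mode_on({"state": "Off", "mode": "TemporaryOverride"}))  # False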
"""Provides a binary sensor which gets its values from a TCP socket.""" from __future__ import annotations from typing import Any, Final from homeassistant.components.binary_sensor import ( PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from .common import TCP_PLATFORM_SCHEMA, TcpEntity from .const import CONF_VALUE_ON PLATFORM_SCHEMA: Final = PARENT_PLATFORM_SCHEMA.extend(TCP_PLATFORM_SCHEMA) def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: dict[str, Any] | None = None, ) -> None: """Set up the TCP binary sensor.""" add_entities([TcpBinarySensor(hass, config)]) class TcpBinarySensor(TcpEntity, BinarySensorEntity): """A binary sensor which is on when its state == CONF_VALUE_ON.""" @property def is_on(self) -> bool: """Return true if the binary sensor is on.""" return self._state == self._config[CONF_VALUE_ON]
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/tcp/binary_sensor.py
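Editor's note on the row above: the TCP binary sensor is simply "on" when the string read back from the socket equals the configured value_on. The sketch below is a hypothetical helper approximating that idea with the standard socket module; the component's actual I/O lives in its TcpEntity base class, which is not shown in this row, so the payload/response handling here is an assumption.

# Minimal sketch: send a payload, read one reply, compare it to value_on.
import socket

def query_is_on(host: str, port: int, payload: bytes, value_on: str, timeout: float = 5.0) -> bool:
    with socket.create_connection((host, port), timeout=timeout) as sock:
        sock.sendall(payload)
        reply = sock.recv(1024).decode().strip()
    return reply == value_on

# Example (assumes a reachable test service on localhost:12345):
# print(query_is_on("127.0.0.1", 12345, b"status\n", value_on="1"))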
"""Support for Z-Wave switches.""" import time from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ZWaveDeviceEntity, workaround async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Z-Wave Switch from Config Entry.""" @callback def async_add_switch(switch): """Add Z-Wave Switch.""" async_add_entities([switch]) async_dispatcher_connect(hass, "zwave_new_switch", async_add_switch) def get_device(values, **kwargs): """Create zwave entity device.""" return ZwaveSwitch(values) class ZwaveSwitch(ZWaveDeviceEntity, SwitchEntity): """Representation of a Z-Wave switch.""" def __init__(self, values): """Initialize the Z-Wave switch device.""" ZWaveDeviceEntity.__init__(self, values, DOMAIN) self.refresh_on_update = ( workaround.get_device_mapping(values.primary) == workaround.WORKAROUND_REFRESH_NODE_ON_UPDATE ) self.last_update = time.perf_counter() self._state = self.values.primary.data def update_properties(self): """Handle data changes for node values.""" self._state = self.values.primary.data if self.refresh_on_update and time.perf_counter() - self.last_update > 30: self.last_update = time.perf_counter() self.node.request_state() @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" self.node.set_switch(self.values.primary.value_id, True) def turn_off(self, **kwargs): """Turn the device off.""" self.node.set_switch(self.values.primary.value_id, False)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/zwave/switch.py
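Editor's note on the row above: ZwaveSwitch.update_properties applies a workaround for devices that need an explicit state refresh, but rate-limits the extra request to at most once every 30 seconds using time.perf_counter(). The sketch below isolates that rate-limited refresh; the node object and its request_state() method are hypothetical stand-ins.

# Minimal sketch of the rate-limited refresh-on-update workaround.
import time

class RefreshOnUpdate:
    MIN_REFRESH_GAP = 30  # seconds, matching the component's hard-coded threshold

    def __init__(self, node, needs_refresh: bool):
        self._node = node                  # assumed to expose request_state()
        self._needs_refresh = needs_refresh
        self._last_refresh = time.perf_counter()

    def on_value_update(self) -> None:
        """Call whenever a new value report arrives."""
        if not self._needs_refresh:
            return
        now = time.perf_counter()
        if now - self._last_refresh > self.MIN_REFRESH_GAP:
            self._last_refresh = now
            self._node.request_state()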
"""Provides functionality to interact with fans.""" from __future__ import annotations from datetime import timedelta import functools as ft import logging import math from typing import final import voluptuous as vol from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.loader import bind_hass from homeassistant.util.percentage import ( ordered_list_item_to_percentage, percentage_to_ordered_list_item, percentage_to_ranged_value, ranged_value_to_percentage, ) _LOGGER = logging.getLogger(__name__) DOMAIN = "fan" SCAN_INTERVAL = timedelta(seconds=30) ENTITY_ID_FORMAT = DOMAIN + ".{}" # Bitfield of features supported by the fan entity SUPPORT_SET_SPEED = 1 SUPPORT_OSCILLATE = 2 SUPPORT_DIRECTION = 4 SUPPORT_PRESET_MODE = 8 SERVICE_SET_SPEED = "set_speed" SERVICE_INCREASE_SPEED = "increase_speed" SERVICE_DECREASE_SPEED = "decrease_speed" SERVICE_OSCILLATE = "oscillate" SERVICE_SET_DIRECTION = "set_direction" SERVICE_SET_PERCENTAGE = "set_percentage" SERVICE_SET_PRESET_MODE = "set_preset_mode" SPEED_OFF = "off" SPEED_LOW = "low" SPEED_MEDIUM = "medium" SPEED_HIGH = "high" DIRECTION_FORWARD = "forward" DIRECTION_REVERSE = "reverse" ATTR_SPEED = "speed" ATTR_PERCENTAGE = "percentage" ATTR_PERCENTAGE_STEP = "percentage_step" ATTR_SPEED_LIST = "speed_list" ATTR_OSCILLATING = "oscillating" ATTR_DIRECTION = "direction" ATTR_PRESET_MODE = "preset_mode" ATTR_PRESET_MODES = "preset_modes" # Invalid speeds do not conform to the entity model, but have crept # into core integrations at some point so we are temporarily # accommodating them in the transition to percentages. 
_NOT_SPEED_OFF = "off" _NOT_SPEED_ON = "on" _NOT_SPEED_AUTO = "auto" _NOT_SPEED_SMART = "smart" _NOT_SPEED_INTERVAL = "interval" _NOT_SPEED_IDLE = "idle" _NOT_SPEED_FAVORITE = "favorite" _NOT_SPEED_SLEEP = "sleep" _NOT_SPEED_SILENT = "silent" _NOT_SPEEDS_FILTER = { _NOT_SPEED_OFF, _NOT_SPEED_ON, _NOT_SPEED_AUTO, _NOT_SPEED_SMART, _NOT_SPEED_INTERVAL, _NOT_SPEED_IDLE, _NOT_SPEED_SILENT, _NOT_SPEED_SLEEP, _NOT_SPEED_FAVORITE, } _FAN_NATIVE = "_fan_native" OFF_SPEED_VALUES = [SPEED_OFF, None] LEGACY_SPEED_LIST = [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH] class NoValidSpeedsError(ValueError): """Exception class when there are no valid speeds.""" class NotValidSpeedError(ValueError): """Exception class when the speed in not in the speed list.""" class NotValidPresetModeError(ValueError): """Exception class when the preset_mode in not in the preset_modes list.""" @bind_hass def is_on(hass, entity_id: str) -> bool: """Return if the fans are on based on the statemachine.""" state = hass.states.get(entity_id) if ATTR_SPEED in state.attributes: return state.attributes[ATTR_SPEED] not in OFF_SPEED_VALUES return state.state == STATE_ON async def async_setup(hass, config: dict): """Expose fan control via statemachine and services.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) # After the transition to percentage and preset_modes concludes, # switch this back to async_turn_on and remove async_turn_on_compat component.async_register_entity_service( SERVICE_TURN_ON, { vol.Optional(ATTR_SPEED): cv.string, vol.Optional(ATTR_PERCENTAGE): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ), vol.Optional(ATTR_PRESET_MODE): cv.string, }, "async_turn_on_compat", ) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") # After the transition to percentage and preset_modes concludes, # remove this service component.async_register_entity_service( SERVICE_SET_SPEED, {vol.Required(ATTR_SPEED): cv.string}, "async_set_speed_deprecated", [SUPPORT_SET_SPEED], ) component.async_register_entity_service( SERVICE_INCREASE_SPEED, { vol.Optional(ATTR_PERCENTAGE_STEP): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ) }, "async_increase_speed", [SUPPORT_SET_SPEED], ) component.async_register_entity_service( SERVICE_DECREASE_SPEED, { vol.Optional(ATTR_PERCENTAGE_STEP): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ) }, "async_decrease_speed", [SUPPORT_SET_SPEED], ) component.async_register_entity_service( SERVICE_OSCILLATE, {vol.Required(ATTR_OSCILLATING): cv.boolean}, "async_oscillate", [SUPPORT_OSCILLATE], ) component.async_register_entity_service( SERVICE_SET_DIRECTION, {vol.Optional(ATTR_DIRECTION): cv.string}, "async_set_direction", [SUPPORT_DIRECTION], ) component.async_register_entity_service( SERVICE_SET_PERCENTAGE, { vol.Required(ATTR_PERCENTAGE): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ) }, "async_set_percentage", [SUPPORT_SET_SPEED], ) component.async_register_entity_service( SERVICE_SET_PRESET_MODE, {vol.Required(ATTR_PRESET_MODE): cv.string}, "async_set_preset_mode", [SUPPORT_SET_SPEED, SUPPORT_PRESET_MODE], ) return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) def _fan_native(method): """Native fan method 
not overridden.""" setattr(method, _FAN_NATIVE, True) return method class FanEntity(ToggleEntity): """Base class for fan entities.""" @_fan_native def set_speed(self, speed: str) -> None: """Set the speed of the fan.""" raise NotImplementedError() async def async_set_speed_deprecated(self, speed: str): """Set the speed of the fan.""" _LOGGER.warning( "The fan.set_speed service is deprecated, use fan.set_percentage or fan.set_preset_mode instead" ) await self.async_set_speed(speed) @_fan_native async def async_set_speed(self, speed: str): """Set the speed of the fan.""" if speed == SPEED_OFF: await self.async_turn_off() return if speed in self.preset_modes: if not hasattr(self.async_set_preset_mode, _FAN_NATIVE): await self.async_set_preset_mode(speed) return if not hasattr(self.set_preset_mode, _FAN_NATIVE): await self.hass.async_add_executor_job(self.set_preset_mode, speed) return else: if not hasattr(self.async_set_percentage, _FAN_NATIVE): await self.async_set_percentage(self.speed_to_percentage(speed)) return if not hasattr(self.set_percentage, _FAN_NATIVE): await self.hass.async_add_executor_job( self.set_percentage, self.speed_to_percentage(speed) ) return await self.hass.async_add_executor_job(self.set_speed, speed) @_fan_native def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" raise NotImplementedError() @_fan_native async def async_set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" if percentage == 0: await self.async_turn_off() elif not hasattr(self.set_percentage, _FAN_NATIVE): await self.hass.async_add_executor_job(self.set_percentage, percentage) else: await self.async_set_speed(self.percentage_to_speed(percentage)) async def async_increase_speed(self, percentage_step: int | None = None) -> None: """Increase the speed of the fan.""" await self._async_adjust_speed(1, percentage_step) async def async_decrease_speed(self, percentage_step: int | None = None) -> None: """Decrease the speed of the fan.""" await self._async_adjust_speed(-1, percentage_step) async def _async_adjust_speed( self, modifier: int, percentage_step: int | None ) -> None: """Increase or decrease the speed of the fan.""" current_percentage = self.percentage or 0 if percentage_step is not None: new_percentage = current_percentage + (percentage_step * modifier) else: speed_range = (1, self.speed_count) speed_index = math.ceil( percentage_to_ranged_value(speed_range, current_percentage) ) new_percentage = ranged_value_to_percentage( speed_range, speed_index + modifier ) new_percentage = max(0, min(100, new_percentage)) await self.async_set_percentage(new_percentage) @_fan_native def set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" self._valid_preset_mode_or_raise(preset_mode) self.set_speed(preset_mode) @_fan_native async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" if not hasattr(self.set_preset_mode, _FAN_NATIVE): await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode) return self._valid_preset_mode_or_raise(preset_mode) await self.async_set_speed(preset_mode) def _valid_preset_mode_or_raise(self, preset_mode): """Raise NotValidPresetModeError on invalid preset_mode.""" preset_modes = self.preset_modes if preset_mode not in preset_modes: raise NotValidPresetModeError( f"The preset_mode {preset_mode} is not a valid preset_mode: {preset_modes}" ) def set_direction(self, direction: str) -> None: """Set the direction of the fan.""" raise 
NotImplementedError() async def async_set_direction(self, direction: str): """Set the direction of the fan.""" await self.hass.async_add_executor_job(self.set_direction, direction) # pylint: disable=arguments-differ def turn_on( self, speed: str | None = None, percentage: int | None = None, preset_mode: str | None = None, **kwargs, ) -> None: """Turn on the fan.""" raise NotImplementedError() async def async_turn_on_compat( self, speed: str | None = None, percentage: int | None = None, preset_mode: str | None = None, **kwargs, ) -> None: """Turn on the fan. This _compat version wraps async_turn_on with backwards and forward compatibility. After the transition to percentage and preset_modes concludes, it should be removed. """ if preset_mode is not None: self._valid_preset_mode_or_raise(preset_mode) speed = preset_mode percentage = None elif speed is not None: _LOGGER.warning( "Calling fan.turn_on with the speed argument is deprecated, use percentage or preset_mode instead" ) if speed in self.preset_modes: preset_mode = speed percentage = None else: percentage = self.speed_to_percentage(speed) elif percentage is not None: speed = self.percentage_to_speed(percentage) await self.async_turn_on( speed=speed, percentage=percentage, preset_mode=preset_mode, **kwargs, ) # pylint: disable=arguments-differ async def async_turn_on( self, speed: str | None = None, percentage: int | None = None, preset_mode: str | None = None, **kwargs, ) -> None: """Turn on the fan.""" if speed == SPEED_OFF: await self.async_turn_off() else: await self.hass.async_add_executor_job( ft.partial( self.turn_on, speed=speed, percentage=percentage, preset_mode=preset_mode, **kwargs, ) ) def oscillate(self, oscillating: bool) -> None: """Oscillate the fan.""" raise NotImplementedError() async def async_oscillate(self, oscillating: bool): """Oscillate the fan.""" await self.hass.async_add_executor_job(self.oscillate, oscillating) @property def is_on(self): """Return true if the entity is on.""" return self.speed not in [SPEED_OFF, None] @property def _implemented_percentage(self) -> bool: """Return true if percentage has been implemented.""" return not hasattr(self.set_percentage, _FAN_NATIVE) or not hasattr( self.async_set_percentage, _FAN_NATIVE ) @property def _implemented_preset_mode(self) -> bool: """Return true if preset_mode has been implemented.""" return not hasattr(self.set_preset_mode, _FAN_NATIVE) or not hasattr( self.async_set_preset_mode, _FAN_NATIVE ) @property def _implemented_speed(self) -> bool: """Return true if speed has been implemented.""" return not hasattr(self.set_speed, _FAN_NATIVE) or not hasattr( self.async_set_speed, _FAN_NATIVE ) @property def speed(self) -> str | None: """Return the current speed.""" if self._implemented_preset_mode: preset_mode = self.preset_mode if preset_mode: return preset_mode if self._implemented_percentage: percentage = self.percentage if percentage is None: return None return self.percentage_to_speed(percentage) return None @property def percentage(self) -> int | None: """Return the current speed as a percentage.""" if not self._implemented_preset_mode and self.speed in self.preset_modes: return None if not self._implemented_percentage: return self.speed_to_percentage(self.speed) return 0 @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" speed_list = speed_list_without_preset_modes(self.speed_list) if speed_list: return len(speed_list) return 100 @property def percentage_step(self) -> float: """Return the step size for 
percentage.""" return 100 / self.speed_count @property def speed_list(self) -> list: """Get the list of available speeds.""" speeds = [] if self._implemented_percentage: speeds += [SPEED_OFF, *LEGACY_SPEED_LIST] if self._implemented_preset_mode: speeds += self.preset_modes return speeds @property def current_direction(self) -> str | None: """Return the current direction of the fan.""" return None @property def oscillating(self): """Return whether or not the fan is currently oscillating.""" return None @property def capability_attributes(self): """Return capability attributes.""" attrs = {} if self.supported_features & SUPPORT_SET_SPEED: attrs[ATTR_SPEED_LIST] = self.speed_list if ( self.supported_features & SUPPORT_SET_SPEED or self.supported_features & SUPPORT_PRESET_MODE ): attrs[ATTR_PRESET_MODES] = self.preset_modes return attrs @property def _speed_list_without_preset_modes(self) -> list: """Return the speed list without preset modes. This property provides forward and backwards compatibility for conversion to percentage speeds. """ if not self._implemented_speed: return LEGACY_SPEED_LIST return speed_list_without_preset_modes(self.speed_list) def speed_to_percentage(self, speed: str) -> int: """ Map a speed to a percentage. Officially this should only have to deal with the 4 pre-defined speeds: return { SPEED_OFF: 0, SPEED_LOW: 33, SPEED_MEDIUM: 66, SPEED_HIGH: 100, }[speed] Unfortunately lots of fans make up their own speeds. So the default mapping is more dynamic. """ if speed in OFF_SPEED_VALUES: return 0 speed_list = self._speed_list_without_preset_modes if speed_list and speed not in speed_list: raise NotValidSpeedError(f"The speed {speed} is not a valid speed.") try: return ordered_list_item_to_percentage(speed_list, speed) except ValueError as ex: raise NoValidSpeedsError( f"The speed_list {speed_list} does not contain any valid speeds." ) from ex def percentage_to_speed(self, percentage: int) -> str: """ Map a percentage onto self.speed_list. Officially, this should only have to deal with 4 pre-defined speeds. if value == 0: return SPEED_OFF elif value <= 33: return SPEED_LOW elif value <= 66: return SPEED_MEDIUM else: return SPEED_HIGH Unfortunately there is currently a high degree of non-conformancy. Until fans have been corrected a more complicated and dynamic mapping is used. """ if percentage == 0: return SPEED_OFF speed_list = self._speed_list_without_preset_modes try: return percentage_to_ordered_list_item(speed_list, percentage) except ValueError as ex: raise NoValidSpeedsError( f"The speed_list {speed_list} does not contain any valid speeds." ) from ex @final @property def state_attributes(self) -> dict: """Return optional state attributes.""" data = {} supported_features = self.supported_features if supported_features & SUPPORT_DIRECTION: data[ATTR_DIRECTION] = self.current_direction if supported_features & SUPPORT_OSCILLATE: data[ATTR_OSCILLATING] = self.oscillating if supported_features & SUPPORT_SET_SPEED: data[ATTR_SPEED] = self.speed data[ATTR_PERCENTAGE] = self.percentage data[ATTR_PERCENTAGE_STEP] = self.percentage_step if ( supported_features & SUPPORT_PRESET_MODE or supported_features & SUPPORT_SET_SPEED ): data[ATTR_PRESET_MODE] = self.preset_mode return data @property def supported_features(self) -> int: """Flag supported features.""" return 0 @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., auto, smart, interval, favorite. Requires SUPPORT_SET_SPEED. 
""" speed = self.speed if speed in self.preset_modes: return speed return None @property def preset_modes(self) -> list[str] | None: """Return a list of available preset modes. Requires SUPPORT_SET_SPEED. """ return preset_modes_from_speed_list(self.speed_list) def speed_list_without_preset_modes(speed_list: list): """Filter out non-speeds from the speed list. The goal is to get the speeds in a list from lowest to highest by removing speeds that are not valid or out of order so we can map them to percentages. Examples: input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"] output: ["low", "low-medium", "medium", "medium-high", "high"] input: ["off", "auto", "low", "medium", "high"] output: ["low", "medium", "high"] input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"] output: ["1", "2", "3", "4", "5", "6", "7"] input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"] output: ["Medium", "High", "Strong"] """ return [speed for speed in speed_list if speed.lower() not in _NOT_SPEEDS_FILTER] def preset_modes_from_speed_list(speed_list: list): """Filter out non-preset modes from the speed list. The goal is to return only preset modes. Examples: input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"] output: ["auto"] input: ["off", "auto", "low", "medium", "high"] output: ["auto"] input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"] output: ["smart"] input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"] output: ["Auto", "Silent", "Favorite", "Idle"] """ return [ speed for speed in speed_list if speed.lower() in _NOT_SPEEDS_FILTER and speed.lower() != SPEED_OFF ]
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/fan/__init__.py
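The fan entity code above (homeassistant/components/fan/__init__.py) converts legacy named speeds to percentages by spacing the entries of the filtered speed list evenly across 1-100, exactly as its speed_to_percentage()/percentage_to_speed() docstrings describe. Below is a minimal standalone sketch of that mapping; it mirrors the ordered-list helpers the fan code imports but is a re-implementation for illustration only, not Home Assistant's homeassistant.util.percentage module, whose exact rounding may differ.

from __future__ import annotations


def ordered_list_item_to_percentage(ordered_list: list[str], item: str) -> int:
    """Map an item of an ordered list to an evenly spaced percentage step."""
    # ["low", "medium", "high"] -> low=33, medium=66, high=100
    position = ordered_list.index(item) + 1
    return (100 * position) // len(ordered_list)


def percentage_to_ordered_list_item(ordered_list: list[str], percentage: int) -> str:
    """Map a percentage back onto an item of an ordered list."""
    # With three speeds: 1-33 -> "low", 34-66 -> "medium", 67-100 -> "high"
    list_len = len(ordered_list)
    for position, item in enumerate(ordered_list, start=1):
        if percentage <= (100 * position) // list_len:
            return item
    return ordered_list[-1]


speeds = ["low", "medium", "high"]
assert ordered_list_item_to_percentage(speeds, "medium") == 66
assert percentage_to_ordered_list_item(speeds, 50) == "medium"
assert percentage_to_ordered_list_item(speeds, 100) == "high"

In the component itself these conversions are applied only after preset modes and the off value have been stripped from the speed list, which is what _speed_list_without_preset_modes and the OFF_SPEED_VALUES check take care of.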
"""Support for tracking which astronomical or meteorological season it is.""" from datetime import datetime import logging import ephem import voluptuous as vol from homeassistant import util from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import CONF_NAME, CONF_TYPE import homeassistant.helpers.config_validation as cv from homeassistant.util.dt import utcnow _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Season" EQUATOR = "equator" NORTHERN = "northern" SOUTHERN = "southern" STATE_AUTUMN = "autumn" STATE_SPRING = "spring" STATE_SUMMER = "summer" STATE_WINTER = "winter" TYPE_ASTRONOMICAL = "astronomical" TYPE_METEOROLOGICAL = "meteorological" VALID_TYPES = [TYPE_ASTRONOMICAL, TYPE_METEOROLOGICAL] HEMISPHERE_SEASON_SWAP = { STATE_WINTER: STATE_SUMMER, STATE_SPRING: STATE_AUTUMN, STATE_AUTUMN: STATE_SPRING, STATE_SUMMER: STATE_WINTER, } SEASON_ICONS = { STATE_SPRING: "mdi:flower", STATE_SUMMER: "mdi:sunglasses", STATE_AUTUMN: "mdi:leaf", STATE_WINTER: "mdi:snowflake", } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_TYPE, default=TYPE_ASTRONOMICAL): vol.In(VALID_TYPES), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Display the current season.""" if None in (hass.config.latitude, hass.config.longitude): _LOGGER.error("Latitude or longitude not set in Home Assistant config") return False latitude = util.convert(hass.config.latitude, float) _type = config.get(CONF_TYPE) name = config.get(CONF_NAME) if latitude < 0: hemisphere = SOUTHERN elif latitude > 0: hemisphere = NORTHERN else: hemisphere = EQUATOR _LOGGER.debug(_type) add_entities([Season(hass, hemisphere, _type, name)], True) return True def get_season(date, hemisphere, season_tracking_type): """Calculate the current season.""" if hemisphere == "equator": return None if season_tracking_type == TYPE_ASTRONOMICAL: spring_start = ephem.next_equinox(str(date.year)).datetime() summer_start = ephem.next_solstice(str(date.year)).datetime() autumn_start = ephem.next_equinox(spring_start).datetime() winter_start = ephem.next_solstice(summer_start).datetime() else: spring_start = datetime(2017, 3, 1).replace(year=date.year) summer_start = spring_start.replace(month=6) autumn_start = spring_start.replace(month=9) winter_start = spring_start.replace(month=12) if spring_start <= date < summer_start: season = STATE_SPRING elif summer_start <= date < autumn_start: season = STATE_SUMMER elif autumn_start <= date < winter_start: season = STATE_AUTUMN elif winter_start <= date or spring_start > date: season = STATE_WINTER # If user is located in the southern hemisphere swap the season if hemisphere == NORTHERN: return season return HEMISPHERE_SEASON_SWAP.get(season) class Season(SensorEntity): """Representation of the current season.""" def __init__(self, hass, hemisphere, season_tracking_type, name): """Initialize the season.""" self.hass = hass self._name = name self.hemisphere = hemisphere self.datetime = None self.type = season_tracking_type self.season = None @property def name(self): """Return the name.""" return self._name @property def state(self): """Return the current season.""" return self.season @property def device_class(self): """Return the device class.""" return "season__season" @property def icon(self): """Icon to use in the frontend, if any.""" return SEASON_ICONS.get(self.season, "mdi:cloud") def update(self): """Update season.""" self.datetime = utcnow().replace(tzinfo=None) 
self.season = get_season(self.datetime, self.hemisphere, self.type)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/season/sensor.py
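The season sensor above determines the season either astronomically (via ephem equinox/solstice lookups) or meteorologically (fixed month boundaries), and then swaps the result for the southern hemisphere. A small self-contained sketch of the meteorological branch, using made-up dates, looks like this:

from __future__ import annotations

from datetime import datetime

SEASON_SWAP = {
    "winter": "summer",
    "spring": "autumn",
    "autumn": "spring",
    "summer": "winter",
}


def meteorological_season(date: datetime, hemisphere: str) -> str | None:
    """Return the meteorological season for a naive datetime, or None at the equator."""
    if hemisphere == "equator":
        return None
    # Meteorological seasons start on 1 March, 1 June, 1 September and 1 December.
    if 3 <= date.month <= 5:
        season = "spring"
    elif 6 <= date.month <= 8:
        season = "summer"
    elif 9 <= date.month <= 11:
        season = "autumn"
    else:
        season = "winter"
    # Mirror the hemisphere swap done at the end of get_season().
    return season if hemisphere == "northern" else SEASON_SWAP[season]


assert meteorological_season(datetime(2021, 7, 15), "northern") == "summer"
assert meteorological_season(datetime(2021, 7, 15), "southern") == "winter"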
"""Methods and classes related to executing Z-Wave commands and publishing these to hass.""" import logging from openzwavemqtt.const import ATTR_LABEL, ATTR_POSITION, ATTR_VALUE from openzwavemqtt.util.node import get_node_from_manager, set_config_parameter import voluptuous as vol from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from . import const _LOGGER = logging.getLogger(__name__) class ZWaveServices: """Class that holds our services ( Zwave Commands) that should be published to hass.""" def __init__(self, hass, manager): """Initialize with both hass and ozwmanager objects.""" self._hass = hass self._manager = manager @callback def async_register(self): """Register all our services.""" self._hass.services.async_register( const.DOMAIN, const.SERVICE_ADD_NODE, self.async_add_node, schema=vol.Schema( { vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int), vol.Optional(const.ATTR_SECURE, default=False): vol.Coerce(bool), } ), ) self._hass.services.async_register( const.DOMAIN, const.SERVICE_REMOVE_NODE, self.async_remove_node, schema=vol.Schema( {vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int)} ), ) self._hass.services.async_register( const.DOMAIN, const.SERVICE_CANCEL_COMMAND, self.async_cancel_command, schema=vol.Schema( {vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int)} ), ) self._hass.services.async_register( const.DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER, self.async_set_config_parameter, schema=vol.Schema( { vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int), vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any( vol.All( cv.ensure_list, [ vol.All( { vol.Exclusive(ATTR_LABEL, "bit"): cv.string, vol.Exclusive(ATTR_POSITION, "bit"): vol.Coerce( int ), vol.Required(ATTR_VALUE): bool, }, cv.has_at_least_one_key(ATTR_LABEL, ATTR_POSITION), ) ], ), vol.Coerce(int), bool, cv.string, ), } ), ) @callback def async_set_config_parameter(self, service): """Set a config parameter to a node.""" instance_id = service.data[const.ATTR_INSTANCE_ID] node_id = service.data[const.ATTR_NODE_ID] param = service.data[const.ATTR_CONFIG_PARAMETER] selection = service.data[const.ATTR_CONFIG_VALUE] # These function calls may raise an exception but that's ok because # the exception will show in the UI to the user node = get_node_from_manager(self._manager, instance_id, node_id) payload = set_config_parameter(node, param, selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with value %s", param, node_id, payload, ) @callback def async_add_node(self, service): """Enter inclusion mode on the controller.""" instance_id = service.data[const.ATTR_INSTANCE_ID] secure = service.data[const.ATTR_SECURE] instance = self._manager.get_instance(instance_id) if instance is None: raise ValueError(f"No OpenZWave Instance with ID {instance_id}") instance.add_node(secure) @callback def async_remove_node(self, service): """Enter exclusion mode on the controller.""" instance_id = service.data[const.ATTR_INSTANCE_ID] instance = self._manager.get_instance(instance_id) if instance is None: raise ValueError(f"No OpenZWave Instance with ID {instance_id}") instance.remove_node() @callback def async_cancel_command(self, service): """Tell the controller to cancel an add or remove command.""" instance_id = service.data[const.ATTR_INSTANCE_ID] instance = self._manager.get_instance(instance_id) if instance is None: 
raise ValueError(f"No OpenZWave Instance with ID {instance_id}") instance.cancel_controller_command()
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/ozw/services.py
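The voluptuous schema for the set_config_parameter service above accepts four value shapes: a plain integer, a boolean, a string label, or a list of bit entries that each carry either a label or a position plus a boolean value. Assuming the const.ATTR_* keys resolve to node_id, parameter and value (an assumption, since the const module is not shown here), hypothetical service payloads for each shape would look like this:

# Hypothetical ozw.set_config_parameter payloads, one per accepted value shape.
# Node and parameter numbers are invented for illustration.
simple_int = {"node_id": 12, "parameter": 3, "value": 25}
as_boolean = {"node_id": 12, "parameter": 4, "value": True}
named_option = {"node_id": 12, "parameter": 5, "value": "Medium"}
bit_field = {
    "node_id": 12,
    "parameter": 9,
    # Each entry needs ATTR_LABEL or ATTR_POSITION (mutually exclusive) plus a bool value.
    "value": [
        {"position": 1, "value": True},
        {"label": "Beep", "value": False},
    ],
}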
"""Config flow utilities.""" from collections import OrderedDict from pyvesync import VeSync import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from .const import DOMAIN class VeSyncFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 def __init__(self): """Instantiate config flow.""" self._username = None self._password = None self.data_schema = OrderedDict() self.data_schema[vol.Required(CONF_USERNAME)] = str self.data_schema[vol.Required(CONF_PASSWORD)] = str @callback def _show_form(self, errors=None): """Show form to the user.""" return self.async_show_form( step_id="user", data_schema=vol.Schema(self.data_schema), errors=errors if errors else {}, ) async def async_step_import(self, import_config): """Handle external yaml configuration.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle a flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") if not user_input: return self._show_form() self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] manager = VeSync(self._username, self._password) login = await self.hass.async_add_executor_job(manager.login) if not login: return self._show_form(errors={"base": "invalid_auth"}) return self.async_create_entry( title=self._username, data={CONF_USERNAME: self._username, CONF_PASSWORD: self._password}, )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/vesync/config_flow.py
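The VeSync flow above builds its form schema as an OrderedDict of voluptuous markers and only wraps it in vol.Schema when the form is shown. The tiny sketch below reproduces that pattern with made-up credentials; it only exercises the schema, not the VeSync login itself.

from collections import OrderedDict

import voluptuous as vol

CONF_USERNAME = "username"
CONF_PASSWORD = "password"

# Same construction as VeSyncFlowHandler.__init__ above.
data_schema = OrderedDict()
data_schema[vol.Required(CONF_USERNAME)] = str
data_schema[vol.Required(CONF_PASSWORD)] = str

schema = vol.Schema(data_schema)
user_input = schema({CONF_USERNAME: "user@example.com", CONF_PASSWORD: "hunter2"})
assert user_input[CONF_USERNAME] == "user@example.com"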
"""Component to interface with cameras.""" from __future__ import annotations import asyncio import base64 import collections from collections.abc import Awaitable, Mapping from contextlib import suppress from datetime import datetime, timedelta import hashlib import logging import os from random import SystemRandom from typing import Callable, Final, cast, final from aiohttp import web import async_timeout import attr import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView from homeassistant.components.media_player.const import ( ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_EXTRA, DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) from homeassistant.components.stream import Stream, create_stream from homeassistant.components.stream.const import FORMAT_CONTENT_TYPE, OUTPUT_FORMATS from homeassistant.components.websocket_api import ActiveConnection from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, CONF_FILENAME, CONTENT_TYPE_MULTIPART, EVENT_HOMEASSISTANT_START, SERVICE_TURN_OFF, SERVICE_TURN_ON, ) from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import Entity, entity_sources from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.network import get_url from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from .const import ( CAMERA_IMAGE_TIMEOUT, CAMERA_STREAM_SOURCE_TIMEOUT, CONF_DURATION, CONF_LOOKBACK, DATA_CAMERA_PREFS, DOMAIN, SERVICE_RECORD, ) from .prefs import CameraPreferences # mypy: allow-untyped-calls _LOGGER = logging.getLogger(__name__) SERVICE_ENABLE_MOTION: Final = "enable_motion_detection" SERVICE_DISABLE_MOTION: Final = "disable_motion_detection" SERVICE_SNAPSHOT: Final = "snapshot" SERVICE_PLAY_STREAM: Final = "play_stream" SCAN_INTERVAL: Final = timedelta(seconds=30) ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" ATTR_FILENAME: Final = "filename" ATTR_MEDIA_PLAYER: Final = "media_player" ATTR_FORMAT: Final = "format" STATE_RECORDING: Final = "recording" STATE_STREAMING: Final = "streaming" STATE_IDLE: Final = "idle" # Bitfield of features supported by the camera entity SUPPORT_ON_OFF: Final = 1 SUPPORT_STREAM: Final = 2 DEFAULT_CONTENT_TYPE: Final = "image/jpeg" ENTITY_IMAGE_URL: Final = "/api/camera_proxy/{0}?token={1}" TOKEN_CHANGE_INTERVAL: Final = timedelta(minutes=5) _RND: Final = SystemRandom() MIN_STREAM_INTERVAL: Final = 0.5 # seconds CAMERA_SERVICE_SNAPSHOT: Final = {vol.Required(ATTR_FILENAME): cv.template} CAMERA_SERVICE_PLAY_STREAM: Final = { vol.Required(ATTR_MEDIA_PLAYER): cv.entities_domain(DOMAIN_MP), vol.Optional(ATTR_FORMAT, default="hls"): vol.In(OUTPUT_FORMATS), } CAMERA_SERVICE_RECORD: Final = { vol.Required(CONF_FILENAME): cv.template, vol.Optional(CONF_DURATION, default=30): vol.Coerce(int), vol.Optional(CONF_LOOKBACK, default=0): vol.Coerce(int), } WS_TYPE_CAMERA_THUMBNAIL: Final = "camera_thumbnail" SCHEMA_WS_CAMERA_THUMBNAIL: Final = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( { vol.Required("type"): WS_TYPE_CAMERA_THUMBNAIL, vol.Required("entity_id"): cv.entity_id, } ) @attr.s class Image: """Represent an image.""" content_type: str = 
attr.ib() content: bytes = attr.ib() @bind_hass async def async_request_stream(hass: HomeAssistant, entity_id: str, fmt: str) -> str: """Request a stream for a camera entity.""" camera = _get_camera_from_entity_id(hass, entity_id) return await _async_stream_endpoint_url(hass, camera, fmt) @bind_hass async def async_get_image( hass: HomeAssistant, entity_id: str, timeout: int = 10 ) -> Image: """Fetch an image from a camera entity.""" camera = _get_camera_from_entity_id(hass, entity_id) with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with async_timeout.timeout(timeout): image = await camera.async_camera_image() if image: return Image(camera.content_type, image) raise HomeAssistantError("Unable to get image") @bind_hass async def async_get_stream_source(hass: HomeAssistant, entity_id: str) -> str | None: """Fetch the stream source for a camera entity.""" camera = _get_camera_from_entity_id(hass, entity_id) return await camera.stream_source() @bind_hass async def async_get_mjpeg_stream( hass: HomeAssistant, request: web.Request, entity_id: str ) -> web.StreamResponse | None: """Fetch an mjpeg stream from a camera entity.""" camera = _get_camera_from_entity_id(hass, entity_id) return await camera.handle_async_mjpeg_stream(request) async def async_get_still_stream( request: web.Request, image_cb: Callable[[], Awaitable[bytes | None]], content_type: str, interval: float, ) -> web.StreamResponse: """Generate an HTTP MJPEG stream from camera images. This method must be run in the event loop. """ response = web.StreamResponse() response.content_type = CONTENT_TYPE_MULTIPART.format("--frameboundary") await response.prepare(request) async def write_to_mjpeg_stream(img_bytes: bytes) -> None: """Write image to stream.""" await response.write( bytes( "--frameboundary\r\n" "Content-Type: {}\r\n" "Content-Length: {}\r\n\r\n".format(content_type, len(img_bytes)), "utf-8", ) + img_bytes + b"\r\n" ) last_image = None while True: img_bytes = await image_cb() if not img_bytes: break if img_bytes != last_image: await write_to_mjpeg_stream(img_bytes) # Chrome seems to always ignore first picture, # print it twice. 
if last_image is None: await write_to_mjpeg_stream(img_bytes) last_image = img_bytes await asyncio.sleep(interval) return response def _get_camera_from_entity_id(hass: HomeAssistant, entity_id: str) -> Camera: """Get camera component from entity_id.""" component = hass.data.get(DOMAIN) if component is None: raise HomeAssistantError("Camera integration not set up") camera = component.get_entity(entity_id) if camera is None: raise HomeAssistantError("Camera not found") if not camera.is_on: raise HomeAssistantError("Camera is off") return cast(Camera, camera) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the camera component.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) prefs = CameraPreferences(hass) await prefs.async_initialize() hass.data[DATA_CAMERA_PREFS] = prefs hass.http.register_view(CameraImageView(component)) hass.http.register_view(CameraMjpegStream(component)) hass.components.websocket_api.async_register_command( WS_TYPE_CAMERA_THUMBNAIL, websocket_camera_thumbnail, SCHEMA_WS_CAMERA_THUMBNAIL ) hass.components.websocket_api.async_register_command(ws_camera_stream) hass.components.websocket_api.async_register_command(websocket_get_prefs) hass.components.websocket_api.async_register_command(websocket_update_prefs) await component.async_setup(config) async def preload_stream(_event: Event) -> None: for camera in component.entities: camera = cast(Camera, camera) camera_prefs = prefs.get(camera.entity_id) if not camera_prefs.preload_stream: continue stream = await camera.create_stream() if not stream: continue stream.keepalive = True stream.add_provider("hls") stream.start() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, preload_stream) @callback def update_tokens(time: datetime) -> None: """Update tokens of the entities.""" for entity in component.entities: entity = cast(Camera, entity) entity.async_update_token() entity.async_write_ha_state() hass.helpers.event.async_track_time_interval(update_tokens, TOKEN_CHANGE_INTERVAL) component.async_register_entity_service( SERVICE_ENABLE_MOTION, {}, "async_enable_motion_detection" ) component.async_register_entity_service( SERVICE_DISABLE_MOTION, {}, "async_disable_motion_detection" ) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") component.async_register_entity_service( SERVICE_SNAPSHOT, CAMERA_SERVICE_SNAPSHOT, async_handle_snapshot_service ) component.async_register_entity_service( SERVICE_PLAY_STREAM, CAMERA_SERVICE_PLAY_STREAM, async_handle_play_stream_service, ) component.async_register_entity_service( SERVICE_RECORD, CAMERA_SERVICE_RECORD, async_handle_record_service ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" component: EntityComponent = hass.data[DOMAIN] return await component.async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" component: EntityComponent = hass.data[DOMAIN] return await component.async_unload_entry(entry) class Camera(Entity): """The base class for camera entities.""" def __init__(self) -> None: """Initialize a camera.""" self.is_streaming: bool = False self.stream: Stream | None = None self.stream_options: dict[str, str] = {} self.content_type: str = DEFAULT_CONTENT_TYPE self.access_tokens: collections.deque = collections.deque([], 2) self.async_update_token() 
@property def should_poll(self) -> bool: """No need to poll cameras.""" return False @property def entity_picture(self) -> str: """Return a link to the camera feed as entity picture.""" return ENTITY_IMAGE_URL.format(self.entity_id, self.access_tokens[-1]) @property def supported_features(self) -> int: """Flag supported features.""" return 0 @property def is_recording(self) -> bool: """Return true if the device is recording.""" return False @property def brand(self) -> str | None: """Return the camera brand.""" return None @property def motion_detection_enabled(self) -> bool: """Return the camera motion detection status.""" return False @property def model(self) -> str | None: """Return the camera model.""" return None @property def frame_interval(self) -> float: """Return the interval between frames of the mjpeg stream.""" return MIN_STREAM_INTERVAL async def create_stream(self) -> Stream | None: """Create a Stream for stream_source.""" # There is at most one stream (a decode worker) per camera if not self.stream: async with async_timeout.timeout(CAMERA_STREAM_SOURCE_TIMEOUT): source = await self.stream_source() if not source: return None self.stream = create_stream(self.hass, source, options=self.stream_options) return self.stream async def stream_source(self) -> str | None: """Return the source of the stream.""" return None def camera_image(self) -> bytes | None: """Return bytes of camera image.""" raise NotImplementedError() async def async_camera_image(self) -> bytes | None: """Return bytes of camera image.""" return await self.hass.async_add_executor_job(self.camera_image) async def handle_async_still_stream( self, request: web.Request, interval: float ) -> web.StreamResponse: """Generate an HTTP MJPEG stream from camera images.""" return await async_get_still_stream( request, self.async_camera_image, self.content_type, interval ) async def handle_async_mjpeg_stream( self, request: web.Request ) -> web.StreamResponse | None: """Serve an HTTP MJPEG stream from the camera. This method can be overridden by camera platforms to proxy a direct stream from the camera. 
""" return await self.handle_async_still_stream(request, self.frame_interval) @property def state(self) -> str: """Return the camera state.""" if self.is_recording: return STATE_RECORDING if self.is_streaming: return STATE_STREAMING return STATE_IDLE @property def is_on(self) -> bool: """Return true if on.""" return True def turn_off(self) -> None: """Turn off camera.""" raise NotImplementedError() async def async_turn_off(self) -> None: """Turn off camera.""" await self.hass.async_add_executor_job(self.turn_off) def turn_on(self) -> None: """Turn off camera.""" raise NotImplementedError() async def async_turn_on(self) -> None: """Turn off camera.""" await self.hass.async_add_executor_job(self.turn_on) def enable_motion_detection(self) -> None: """Enable motion detection in the camera.""" raise NotImplementedError() async def async_enable_motion_detection(self) -> None: """Call the job and enable motion detection.""" await self.hass.async_add_executor_job(self.enable_motion_detection) def disable_motion_detection(self) -> None: """Disable motion detection in camera.""" raise NotImplementedError() async def async_disable_motion_detection(self) -> None: """Call the job and disable motion detection.""" await self.hass.async_add_executor_job(self.disable_motion_detection) @final @property def state_attributes(self) -> dict[str, str | None]: """Return the camera state attributes.""" attrs = {"access_token": self.access_tokens[-1]} if self.model: attrs["model_name"] = self.model if self.brand: attrs["brand"] = self.brand if self.motion_detection_enabled: attrs["motion_detection"] = self.motion_detection_enabled return attrs @callback def async_update_token(self) -> None: """Update the used token.""" self.access_tokens.append( hashlib.sha256(_RND.getrandbits(256).to_bytes(32, "little")).hexdigest() ) class CameraView(HomeAssistantView): """Base CameraView.""" requires_auth = False def __init__(self, component: EntityComponent) -> None: """Initialize a basic camera view.""" self.component = component async def get(self, request: web.Request, entity_id: str) -> web.StreamResponse: """Start a GET request.""" camera = self.component.get_entity(entity_id) if camera is None: raise web.HTTPNotFound() camera = cast(Camera, camera) authenticated = ( request[KEY_AUTHENTICATED] or request.query.get("token") in camera.access_tokens ) if not authenticated: raise web.HTTPUnauthorized() if not camera.is_on: _LOGGER.debug("Camera is off") raise web.HTTPServiceUnavailable() return await self.handle(request, camera) async def handle(self, request: web.Request, camera: Camera) -> web.StreamResponse: """Handle the camera request.""" raise NotImplementedError() class CameraImageView(CameraView): """Camera view to serve an image.""" url = "/api/camera_proxy/{entity_id}" name = "api:camera:image" async def handle(self, request: web.Request, camera: Camera) -> web.Response: """Serve camera image.""" with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with async_timeout.timeout(CAMERA_IMAGE_TIMEOUT): image = await camera.async_camera_image() if image: return web.Response(body=image, content_type=camera.content_type) raise web.HTTPInternalServerError() class CameraMjpegStream(CameraView): """Camera View to serve an MJPEG stream.""" url = "/api/camera_proxy_stream/{entity_id}" name = "api:camera:stream" async def handle(self, request: web.Request, camera: Camera) -> web.StreamResponse: """Serve camera stream, possibly with interval.""" interval_str = request.query.get("interval") if interval_str is None: 
stream = await camera.handle_async_mjpeg_stream(request) if stream is None: raise web.HTTPBadGateway() return stream try: # Compose camera stream from stills interval = float(interval_str) if interval < MIN_STREAM_INTERVAL: raise ValueError(f"Stream interval must be be > {MIN_STREAM_INTERVAL}") return await camera.handle_async_still_stream(request, interval) except ValueError as err: raise web.HTTPBadRequest() from err @websocket_api.async_response async def websocket_camera_thumbnail( hass: HomeAssistant, connection: ActiveConnection, msg: dict ) -> None: """Handle get camera thumbnail websocket command. Async friendly. """ _LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated") try: image = await async_get_image(hass, msg["entity_id"]) await connection.send_big_result( msg["id"], { "content_type": image.content_type, "content": base64.b64encode(image.content).decode("utf-8"), }, ) except HomeAssistantError: connection.send_message( websocket_api.error_message( msg["id"], "image_fetch_failed", "Unable to fetch image" ) ) @websocket_api.websocket_command( { vol.Required("type"): "camera/stream", vol.Required("entity_id"): cv.entity_id, vol.Optional("format", default="hls"): vol.In(OUTPUT_FORMATS), } ) @websocket_api.async_response async def ws_camera_stream( hass: HomeAssistant, connection: ActiveConnection, msg: dict ) -> None: """Handle get camera stream websocket command. Async friendly. """ try: entity_id = msg["entity_id"] camera = _get_camera_from_entity_id(hass, entity_id) url = await _async_stream_endpoint_url(hass, camera, fmt=msg["format"]) connection.send_result(msg["id"], {"url": url}) except HomeAssistantError as ex: _LOGGER.error("Error requesting stream: %s", ex) connection.send_error(msg["id"], "start_stream_failed", str(ex)) except asyncio.TimeoutError: _LOGGER.error("Timeout getting stream source") connection.send_error( msg["id"], "start_stream_failed", "Timeout getting stream source" ) @websocket_api.websocket_command( {vol.Required("type"): "camera/get_prefs", vol.Required("entity_id"): cv.entity_id} ) @websocket_api.async_response async def websocket_get_prefs( hass: HomeAssistant, connection: ActiveConnection, msg: dict ) -> None: """Handle request for account info.""" prefs = hass.data[DATA_CAMERA_PREFS].get(msg["entity_id"]) connection.send_result(msg["id"], prefs.as_dict()) @websocket_api.websocket_command( { vol.Required("type"): "camera/update_prefs", vol.Required("entity_id"): cv.entity_id, vol.Optional("preload_stream"): bool, } ) @websocket_api.async_response async def websocket_update_prefs( hass: HomeAssistant, connection: ActiveConnection, msg: dict ) -> None: """Handle request for account info.""" prefs = hass.data[DATA_CAMERA_PREFS] changes = dict(msg) changes.pop("id") changes.pop("type") entity_id = changes.pop("entity_id") await prefs.async_update(entity_id, **changes) connection.send_result(msg["id"], prefs.get(entity_id).as_dict()) async def async_handle_snapshot_service( camera: Camera, service_call: ServiceCall ) -> None: """Handle snapshot services calls.""" hass = camera.hass filename = service_call.data[ATTR_FILENAME] filename.hass = hass snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera}) # check if we allow to access to that file if not hass.config.is_allowed_path(snapshot_file): _LOGGER.error("Can't write %s, no access to path!", snapshot_file) return image = await camera.async_camera_image() def _write_image(to_file: str, image_data: bytes | None) -> None: """Executor helper to write image.""" 
if image_data is None: return if not os.path.exists(os.path.dirname(to_file)): os.makedirs(os.path.dirname(to_file), exist_ok=True) with open(to_file, "wb") as img_file: img_file.write(image_data) try: await hass.async_add_executor_job(_write_image, snapshot_file, image) except OSError as err: _LOGGER.error("Can't write image to file: %s", err) async def async_handle_play_stream_service( camera: Camera, service_call: ServiceCall ) -> None: """Handle play stream services calls.""" fmt = service_call.data[ATTR_FORMAT] url = await _async_stream_endpoint_url(camera.hass, camera, fmt) hass = camera.hass data: Mapping[str, str] = { ATTR_MEDIA_CONTENT_ID: f"{get_url(hass)}{url}", ATTR_MEDIA_CONTENT_TYPE: FORMAT_CONTENT_TYPE[fmt], } # It is required to send a different payload for cast media players entity_ids = service_call.data[ATTR_MEDIA_PLAYER] sources = entity_sources(hass) cast_entity_ids = [ entity for entity in entity_ids # All entities should be in sources. This extra guard is to # avoid people writing to the state machine and breaking it. if entity in sources and sources[entity]["domain"] == "cast" ] other_entity_ids = list(set(entity_ids) - set(cast_entity_ids)) if cast_entity_ids: await hass.services.async_call( DOMAIN_MP, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: cast_entity_ids, **data, ATTR_MEDIA_EXTRA: { "stream_type": "LIVE", "media_info": { "hlsVideoSegmentFormat": "fmp4", }, }, }, blocking=True, context=service_call.context, ) if other_entity_ids: await hass.services.async_call( DOMAIN_MP, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: other_entity_ids, **data, }, blocking=True, context=service_call.context, ) async def _async_stream_endpoint_url( hass: HomeAssistant, camera: Camera, fmt: str ) -> str: stream = await camera.create_stream() if not stream: raise HomeAssistantError( f"{camera.entity_id} does not support play stream service" ) # Update keepalive setting which manages idle shutdown camera_prefs = hass.data[DATA_CAMERA_PREFS].get(camera.entity_id) stream.keepalive = camera_prefs.preload_stream stream.add_provider(fmt) stream.start() return stream.endpoint_url(fmt) async def async_handle_record_service( camera: Camera, service_call: ServiceCall ) -> None: """Handle stream recording service calls.""" stream = await camera.create_stream() if not stream: raise HomeAssistantError(f"{camera.entity_id} does not support record service") hass = camera.hass filename = service_call.data[CONF_FILENAME] filename.hass = hass video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera}) await stream.async_record( video_path, duration=service_call.data[CONF_DURATION], lookback=service_call.data[CONF_LOOKBACK], )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/camera/__init__.py
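The create_valid_feature_mock helper in the test above relies on a unittest.mock pattern that is easy to miss: PropertyMock objects have to be attached to the mock's type, not to the instance. A minimal standalone sketch of that pattern (the attribute value is made up, stdlib only):

from unittest.mock import MagicMock, PropertyMock

# Attaching the PropertyMock to type(product) makes product.firmware_version
# return a fixed value instead of a child mock.
product = MagicMock()
type(product).firmware_version = PropertyMock(return_value="1.23")

assert product.firmware_version == "1.23"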
"""Constants for the devolo_home_control integration.""" import re DOMAIN = "devolo_home_control" DEFAULT_MYDEVOLO = "https://www.mydevolo.com" PLATFORMS = ["binary_sensor", "climate", "cover", "light", "sensor", "switch"] CONF_MYDEVOLO = "mydevolo_url" GATEWAY_SERIAL_PATTERN = re.compile(r"\d{16}") SUPPORTED_MODEL_TYPES = ["2600", "2601"]
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/devolo_home_control/const.py
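A standalone illustration of the GATEWAY_SERIAL_PATTERN constant defined above; the serial value is made up and only the stdlib re module is used.

import re

GATEWAY_SERIAL_PATTERN = re.compile(r"\d{16}")

# A 16-digit serial matches; anything shorter does not (fullmatch anchors both ends).
assert GATEWAY_SERIAL_PATTERN.fullmatch("1234567890123456") is not None
assert GATEWAY_SERIAL_PATTERN.fullmatch("12345678") is None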
"""Flock platform for notify component.""" import asyncio import logging import async_timeout import voluptuous as vol from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService from homeassistant.const import CONF_ACCESS_TOKEN, HTTP_OK from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) _RESOURCE = "https://api.flock.com/hooks/sendMessage/" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_ACCESS_TOKEN): cv.string}) async def async_get_service(hass, config, discovery_info=None): """Get the Flock notification service.""" access_token = config.get(CONF_ACCESS_TOKEN) url = f"{_RESOURCE}{access_token}" session = async_get_clientsession(hass) return FlockNotificationService(url, session) class FlockNotificationService(BaseNotificationService): """Implement the notification service for Flock.""" def __init__(self, url, session): """Initialize the Flock notification service.""" self._url = url self._session = session async def async_send_message(self, message, **kwargs): """Send the message to the user.""" payload = {"text": message} _LOGGER.debug("Attempting to call Flock at %s", self._url) try: with async_timeout.timeout(10): response = await self._session.post(self._url, json=payload) result = await response.json() if response.status != HTTP_OK or "error" in result: _LOGGER.error( "Flock service returned HTTP status %d, response %s", response.status, result, ) except asyncio.TimeoutError: _LOGGER.error("Timeout accessing Flock at %s", self._url)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/flock/notify.py
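For reference, a minimal sketch of the webhook URL and JSON body that FlockNotificationService above ends up posting; the token and message text are placeholders and no request is actually sent.

_RESOURCE = "https://api.flock.com/hooks/sendMessage/"
access_token = "EXAMPLE-TOKEN"  # placeholder for CONF_ACCESS_TOKEN

url = f"{_RESOURCE}{access_token}"
payload = {"text": "Front door unlocked"}  # passed as json= to session.post()

assert url == "https://api.flock.com/hooks/sendMessage/EXAMPLE-TOKEN"
# The service treats a non-200 status or an "error" key in the response JSON as failure.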
"""Support for the yandex speechkit tts service.""" import asyncio import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider from homeassistant.const import CONF_API_KEY, HTTP_OK from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) YANDEX_API_URL = "https://tts.voicetech.yandex.net/generate?" SUPPORT_LANGUAGES = ["ru-RU", "en-US", "tr-TR", "uk-UK"] SUPPORT_CODECS = ["mp3", "wav", "opus"] SUPPORT_VOICES = [ "jane", "oksana", "alyss", "omazh", "zahar", "ermil", "levitan", "ermilov", "silaerkan", "kolya", "kostya", "nastya", "sasha", "nick", "erkanyavas", "zhenya", "tanya", "anton_samokhvalov", "tatyana_abramova", "voicesearch", "ermil_with_tuning", "robot", "dude", "zombie", "smoky", ] SUPPORTED_EMOTION = ["good", "evil", "neutral"] MIN_SPEED = 0.1 MAX_SPEED = 3 CONF_CODEC = "codec" CONF_VOICE = "voice" CONF_EMOTION = "emotion" CONF_SPEED = "speed" DEFAULT_LANG = "en-US" DEFAULT_CODEC = "mp3" DEFAULT_VOICE = "zahar" DEFAULT_EMOTION = "neutral" DEFAULT_SPEED = 1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES), vol.Optional(CONF_CODEC, default=DEFAULT_CODEC): vol.In(SUPPORT_CODECS), vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORT_VOICES), vol.Optional(CONF_EMOTION, default=DEFAULT_EMOTION): vol.In(SUPPORTED_EMOTION), vol.Optional(CONF_SPEED, default=DEFAULT_SPEED): vol.Range( min=MIN_SPEED, max=MAX_SPEED ), } ) SUPPORTED_OPTIONS = [CONF_CODEC, CONF_VOICE, CONF_EMOTION, CONF_SPEED] async def async_get_engine(hass, config, discovery_info=None): """Set up VoiceRSS speech component.""" return YandexSpeechKitProvider(hass, config) class YandexSpeechKitProvider(Provider): """VoiceRSS speech api provider.""" def __init__(self, hass, conf): """Init VoiceRSS TTS service.""" self.hass = hass self._codec = conf.get(CONF_CODEC) self._key = conf.get(CONF_API_KEY) self._speaker = conf.get(CONF_VOICE) self._language = conf.get(CONF_LANG) self._emotion = conf.get(CONF_EMOTION) self._speed = str(conf.get(CONF_SPEED)) self.name = "YandexTTS" @property def default_language(self): """Return the default language.""" return self._language @property def supported_languages(self): """Return list of supported languages.""" return SUPPORT_LANGUAGES @property def supported_options(self): """Return list of supported options.""" return SUPPORTED_OPTIONS async def async_get_tts_audio(self, message, language, options=None): """Load TTS from yandex.""" websession = async_get_clientsession(self.hass) actual_language = language options = options or {} try: with async_timeout.timeout(10): url_param = { "text": message, "lang": actual_language, "key": self._key, "speaker": options.get(CONF_VOICE, self._speaker), "format": options.get(CONF_CODEC, self._codec), "emotion": options.get(CONF_EMOTION, self._emotion), "speed": options.get(CONF_SPEED, self._speed), } request = await websession.get(YANDEX_API_URL, params=url_param) if request.status != HTTP_OK: _LOGGER.error( "Error %d on load URL %s", request.status, request.url ) return (None, None) data = await request.read() except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Timeout for yandex speech kit API") return (None, None) return (self._codec, data)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/yandextts/tts.py
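A stdlib-only sketch of the query string the provider above assembles for its GET request; the API key is a placeholder and no request is made.

from urllib.parse import urlencode

YANDEX_API_URL = "https://tts.voicetech.yandex.net/generate?"

url_param = {
    "text": "hello world",
    "lang": "en-US",       # actual_language passed in by the TTS platform
    "key": "EXAMPLE-KEY",  # placeholder for CONF_API_KEY
    "speaker": "zahar",
    "format": "mp3",
    "emotion": "neutral",
    "speed": "1",
}

print(YANDEX_API_URL + urlencode(url_param))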
"""Config flow for AlarmDecoder.""" import logging from adext import AdExt from alarmdecoder.devices import SerialDevice, SocketDevice from alarmdecoder.util import NoDeviceError import voluptuous as vol from homeassistant import config_entries from homeassistant.components.binary_sensor import DEVICE_CLASSES from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PROTOCOL from homeassistant.core import callback from .const import ( CONF_ALT_NIGHT_MODE, CONF_AUTO_BYPASS, CONF_CODE_ARM_REQUIRED, CONF_DEVICE_BAUD, CONF_DEVICE_PATH, CONF_RELAY_ADDR, CONF_RELAY_CHAN, CONF_ZONE_LOOP, CONF_ZONE_NAME, CONF_ZONE_NUMBER, CONF_ZONE_RFID, CONF_ZONE_TYPE, DEFAULT_ARM_OPTIONS, DEFAULT_DEVICE_BAUD, DEFAULT_DEVICE_HOST, DEFAULT_DEVICE_PATH, DEFAULT_DEVICE_PORT, DEFAULT_ZONE_OPTIONS, DEFAULT_ZONE_TYPE, DOMAIN, OPTIONS_ARM, OPTIONS_ZONES, PROTOCOL_SERIAL, PROTOCOL_SOCKET, ) EDIT_KEY = "edit_selection" EDIT_ZONES = "Zones" EDIT_SETTINGS = "Arming Settings" _LOGGER = logging.getLogger(__name__) class AlarmDecoderFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a AlarmDecoder config flow.""" VERSION = 1 def __init__(self): """Initialize AlarmDecoder ConfigFlow.""" self.protocol = None @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for AlarmDecoder.""" return AlarmDecoderOptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if user_input is not None: self.protocol = user_input[CONF_PROTOCOL] return await self.async_step_protocol() return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_PROTOCOL): vol.In( [PROTOCOL_SOCKET, PROTOCOL_SERIAL] ), } ), ) async def async_step_protocol(self, user_input=None): """Handle AlarmDecoder protocol setup.""" errors = {} if user_input is not None: if _device_already_added( self._async_current_entries(), user_input, self.protocol ): return self.async_abort(reason="already_configured") connection = {} baud = None if self.protocol == PROTOCOL_SOCKET: host = connection[CONF_HOST] = user_input[CONF_HOST] port = connection[CONF_PORT] = user_input[CONF_PORT] title = f"{host}:{port}" device = SocketDevice(interface=(host, port)) if self.protocol == PROTOCOL_SERIAL: path = connection[CONF_DEVICE_PATH] = user_input[CONF_DEVICE_PATH] baud = connection[CONF_DEVICE_BAUD] = user_input[CONF_DEVICE_BAUD] title = path device = SerialDevice(interface=path) controller = AdExt(device) def test_connection(): controller.open(baud) controller.close() try: await self.hass.async_add_executor_job(test_connection) return self.async_create_entry( title=title, data={CONF_PROTOCOL: self.protocol, **connection} ) except NoDeviceError: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception during AlarmDecoder setup") errors["base"] = "unknown" if self.protocol == PROTOCOL_SOCKET: schema = vol.Schema( { vol.Required(CONF_HOST, default=DEFAULT_DEVICE_HOST): str, vol.Required(CONF_PORT, default=DEFAULT_DEVICE_PORT): int, } ) if self.protocol == PROTOCOL_SERIAL: schema = vol.Schema( { vol.Required(CONF_DEVICE_PATH, default=DEFAULT_DEVICE_PATH): str, vol.Required(CONF_DEVICE_BAUD, default=DEFAULT_DEVICE_BAUD): int, } ) return self.async_show_form( step_id="protocol", data_schema=schema, errors=errors, ) class AlarmDecoderOptionsFlowHandler(config_entries.OptionsFlow): """Handle AlarmDecoder options.""" def __init__(self, config_entry: config_entries.ConfigEntry) -> None: 
"""Initialize AlarmDecoder options flow.""" self.arm_options = config_entry.options.get(OPTIONS_ARM, DEFAULT_ARM_OPTIONS) self.zone_options = config_entry.options.get( OPTIONS_ZONES, DEFAULT_ZONE_OPTIONS ) self.selected_zone = None async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: if user_input[EDIT_KEY] == EDIT_SETTINGS: return await self.async_step_arm_settings() if user_input[EDIT_KEY] == EDIT_ZONES: return await self.async_step_zone_select() return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required(EDIT_KEY, default=EDIT_SETTINGS): vol.In( [EDIT_SETTINGS, EDIT_ZONES] ) }, ), ) async def async_step_arm_settings(self, user_input=None): """Arming options form.""" if user_input is not None: return self.async_create_entry( title="", data={OPTIONS_ARM: user_input, OPTIONS_ZONES: self.zone_options}, ) return self.async_show_form( step_id="arm_settings", data_schema=vol.Schema( { vol.Optional( CONF_ALT_NIGHT_MODE, default=self.arm_options[CONF_ALT_NIGHT_MODE], ): bool, vol.Optional( CONF_AUTO_BYPASS, default=self.arm_options[CONF_AUTO_BYPASS] ): bool, vol.Optional( CONF_CODE_ARM_REQUIRED, default=self.arm_options[CONF_CODE_ARM_REQUIRED], ): bool, }, ), ) async def async_step_zone_select(self, user_input=None): """Zone selection form.""" errors = _validate_zone_input(user_input) if user_input is not None and not errors: self.selected_zone = str( int(user_input[CONF_ZONE_NUMBER]) ) # remove leading zeros return await self.async_step_zone_details() return self.async_show_form( step_id="zone_select", data_schema=vol.Schema({vol.Required(CONF_ZONE_NUMBER): str}), errors=errors, ) async def async_step_zone_details(self, user_input=None): """Zone details form.""" errors = _validate_zone_input(user_input) if user_input is not None and not errors: zone_options = self.zone_options.copy() zone_id = self.selected_zone zone_options[zone_id] = _fix_input_types(user_input) # Delete zone entry if zone_name is omitted if CONF_ZONE_NAME not in zone_options[zone_id]: zone_options.pop(zone_id) return self.async_create_entry( title="", data={OPTIONS_ARM: self.arm_options, OPTIONS_ZONES: zone_options}, ) existing_zone_settings = self.zone_options.get(self.selected_zone, {}) return self.async_show_form( step_id="zone_details", description_placeholders={CONF_ZONE_NUMBER: self.selected_zone}, data_schema=vol.Schema( { vol.Optional( CONF_ZONE_NAME, description={ "suggested_value": existing_zone_settings.get( CONF_ZONE_NAME ) }, ): str, vol.Optional( CONF_ZONE_TYPE, default=existing_zone_settings.get( CONF_ZONE_TYPE, DEFAULT_ZONE_TYPE ), ): vol.In(DEVICE_CLASSES), vol.Optional( CONF_ZONE_RFID, description={ "suggested_value": existing_zone_settings.get( CONF_ZONE_RFID ) }, ): str, vol.Optional( CONF_ZONE_LOOP, description={ "suggested_value": existing_zone_settings.get( CONF_ZONE_LOOP ) }, ): str, vol.Optional( CONF_RELAY_ADDR, description={ "suggested_value": existing_zone_settings.get( CONF_RELAY_ADDR ) }, ): str, vol.Optional( CONF_RELAY_CHAN, description={ "suggested_value": existing_zone_settings.get( CONF_RELAY_CHAN ) }, ): str, } ), errors=errors, ) def _validate_zone_input(zone_input): if not zone_input: return {} errors = {} # CONF_RELAY_ADDR & CONF_RELAY_CHAN are inclusive if (CONF_RELAY_ADDR in zone_input and CONF_RELAY_CHAN not in zone_input) or ( CONF_RELAY_ADDR not in zone_input and CONF_RELAY_CHAN in zone_input ): errors["base"] = "relay_inclusive" # The following keys must be int for key in [CONF_ZONE_NUMBER, CONF_ZONE_LOOP, 
CONF_RELAY_ADDR, CONF_RELAY_CHAN]: if key in zone_input: try: int(zone_input[key]) except ValueError: errors[key] = "int" # CONF_ZONE_LOOP depends on CONF_ZONE_RFID if CONF_ZONE_LOOP in zone_input and CONF_ZONE_RFID not in zone_input: errors[CONF_ZONE_LOOP] = "loop_rfid" # CONF_ZONE_LOOP must be 1-4 if ( CONF_ZONE_LOOP in zone_input and zone_input[CONF_ZONE_LOOP].isdigit() and int(zone_input[CONF_ZONE_LOOP]) not in list(range(1, 5)) ): errors[CONF_ZONE_LOOP] = "loop_range" return errors def _fix_input_types(zone_input): """Convert necessary keys to int. Since ConfigFlow inputs of type int cannot default to an empty string, we collect the values below as strings and then convert them to ints. """ for key in [CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN]: if key in zone_input: zone_input[key] = int(zone_input[key]) return zone_input def _device_already_added(current_entries, user_input, protocol): """Determine if entry has already been added to HA.""" user_host = user_input.get(CONF_HOST) user_port = user_input.get(CONF_PORT) user_path = user_input.get(CONF_DEVICE_PATH) user_baud = user_input.get(CONF_DEVICE_BAUD) for entry in current_entries: entry_host = entry.data.get(CONF_HOST) entry_port = entry.data.get(CONF_PORT) entry_path = entry.data.get(CONF_DEVICE_PATH) entry_baud = entry.data.get(CONF_DEVICE_BAUD) if ( protocol == PROTOCOL_SOCKET and user_host == entry_host and user_port == entry_port ): return True if ( protocol == PROTOCOL_SERIAL and user_baud == entry_baud and user_path == entry_path ): return True return False
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/alarmdecoder/config_flow.py
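The zone-options logic above is driven by the module-level helpers _validate_zone_input and _fix_input_types, which can be exercised without any AlarmDecoder hardware. A sketch, assuming Home Assistant and the integration's requirements (adext, alarmdecoder) are importable; the zone values are made up.

from homeassistant.components.alarmdecoder.config_flow import (
    _fix_input_types,
    _validate_zone_input,
)
from homeassistant.components.alarmdecoder.const import CONF_RELAY_ADDR, CONF_ZONE_NUMBER

# A bare zone number validates cleanly.
assert _validate_zone_input({CONF_ZONE_NUMBER: "5"}) == {}

# A relay address without a relay channel trips the inclusive check.
assert _validate_zone_input({CONF_ZONE_NUMBER: "5", CONF_RELAY_ADDR: "12"}) == {
    "base": "relay_inclusive"
}

# _fix_input_types converts the string fields back to ints before storing them.
assert _fix_input_types({CONF_RELAY_ADDR: "12"}) == {CONF_RELAY_ADDR: 12}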
"""Config flow for Litter-Robot integration.""" import logging from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginException import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN from .hub import LitterRobotHub _LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Litter-Robot.""" VERSION = 1 async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]}) hub = LitterRobotHub(self.hass, user_input) try: await hub.login() except LitterRobotLoginException: errors["base"] = "invalid_auth" except LitterRobotException: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if not errors: return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/litterrobot/config_flow.py
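The user step above validates input with a plain voluptuous schema; a standalone sketch of the same shape (credential values are placeholders and the key strings mirror CONF_USERNAME and CONF_PASSWORD), assuming only voluptuous is installed.

import voluptuous as vol

STEP_USER_DATA_SCHEMA = vol.Schema(
    {vol.Required("username"): str, vol.Required("password"): str}
)

# Valid input is returned unchanged; a missing required key raises vol.MultipleInvalid.
validated = STEP_USER_DATA_SCHEMA(
    {"username": "user@example.com", "password": "correct horse"}
)
assert validated == {"username": "user@example.com", "password": "correct horse"}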
"""API for Honeywell Lyric bound to Home Assistant OAuth.""" import logging from typing import cast from aiohttp import BasicAuth, ClientSession from aiolyric.client import LyricClient from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession _LOGGER = logging.getLogger(__name__) class ConfigEntryLyricClient(LyricClient): """Provide Honeywell Lyric authentication tied to an OAuth2 based config entry.""" def __init__( self, websession: ClientSession, oauth_session: config_entry_oauth2_flow.OAuth2Session, ) -> None: """Initialize Honeywell Lyric auth.""" super().__init__(websession) self._oauth_session = oauth_session async def async_get_access_token(self): """Return a valid access token.""" if not self._oauth_session.valid_token: await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] class LyricLocalOAuth2Implementation( config_entry_oauth2_flow.LocalOAuth2Implementation ): """Lyric Local OAuth2 implementation.""" async def _token_request(self, data: dict) -> dict: """Make a token request.""" session = async_get_clientsession(self.hass) data["client_id"] = self.client_id if self.client_secret is not None: data["client_secret"] = self.client_secret headers = { "Authorization": BasicAuth(self.client_id, self.client_secret).encode(), "Content-Type": "application/x-www-form-urlencoded", } resp = await session.post(self.token_url, headers=headers, data=data) resp.raise_for_status() return cast(dict, await resp.json())
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/lyric/api.py
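The token request above authenticates with an HTTP Basic header built by aiohttp; a small sketch of just that header, with placeholder credentials, assuming aiohttp is installed.

from aiohttp import BasicAuth

# BasicAuth(...).encode() yields the Authorization header value,
# i.e. "Basic " followed by base64("client_id:client_secret").
header = BasicAuth("example-client-id", "example-client-secret").encode()
print(header)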
"""Support for Vera thermostats.""" from __future__ import annotations from typing import Any import pyvera as veraApi from homeassistant.components.climate import ( DOMAIN as PLATFORM_DOMAIN, ENTITY_ID_FORMAT, ClimateEntity, ) from homeassistant.components.climate.const import ( FAN_AUTO, FAN_ON, HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import convert from . import VeraDevice from .common import ControllerData, get_controller_data FAN_OPERATION_LIST = [FAN_ON, FAN_AUTO] SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE SUPPORT_HVAC = [HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF] async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the sensor config entry.""" controller_data = get_controller_data(hass, entry) async_add_entities( [ VeraThermostat(device, controller_data) for device in controller_data.devices.get(PLATFORM_DOMAIN) ], True, ) class VeraThermostat(VeraDevice[veraApi.VeraThermostat], ClimateEntity): """Representation of a Vera Thermostat.""" def __init__( self, vera_device: veraApi.VeraThermostat, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property def supported_features(self) -> int | None: """Return the list of supported features.""" return SUPPORT_FLAGS @property def hvac_mode(self) -> str: """Return hvac operation ie. heat, cool mode. Need to be one of HVAC_MODE_*. """ mode = self.vera_device.get_hvac_mode() if mode == "HeatOn": return HVAC_MODE_HEAT if mode == "CoolOn": return HVAC_MODE_COOL if mode == "AutoChangeOver": return HVAC_MODE_HEAT_COOL return HVAC_MODE_OFF @property def hvac_modes(self) -> list[str]: """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES. """ return SUPPORT_HVAC @property def fan_mode(self) -> str | None: """Return the fan setting.""" mode = self.vera_device.get_fan_mode() if mode == "ContinuousOn": return FAN_ON return FAN_AUTO @property def fan_modes(self) -> list[str] | None: """Return a list of available fan modes.""" return FAN_OPERATION_LIST def set_fan_mode(self, fan_mode) -> None: """Set new target temperature.""" if fan_mode == FAN_ON: self.vera_device.fan_on() else: self.vera_device.fan_auto() self.schedule_update_ha_state() @property def current_power_w(self) -> float | None: """Return the current power usage in W.""" power = self.vera_device.power if power: return convert(power, float, 0.0) @property def temperature_unit(self) -> str: """Return the unit of measurement.""" vera_temp_units = self.vera_device.vera_controller.temperature_units if vera_temp_units == "F": return TEMP_FAHRENHEIT return TEMP_CELSIUS @property def current_temperature(self) -> float | None: """Return the current temperature.""" return self.vera_device.get_current_temperature() @property def operation(self) -> str: """Return current operation ie. 
heat, cool, idle.""" return self.vera_device.get_hvac_mode() @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" return self.vera_device.get_current_goal_temperature() def set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" if kwargs.get(ATTR_TEMPERATURE) is not None: self.vera_device.set_temperature(kwargs.get(ATTR_TEMPERATURE)) self.schedule_update_ha_state() def set_hvac_mode(self, hvac_mode) -> None: """Set new target hvac mode.""" if hvac_mode == HVAC_MODE_OFF: self.vera_device.turn_off() elif hvac_mode == HVAC_MODE_HEAT_COOL: self.vera_device.turn_auto_on() elif hvac_mode == HVAC_MODE_COOL: self.vera_device.turn_cool_on() elif hvac_mode == HVAC_MODE_HEAT: self.vera_device.turn_heat_on() self.schedule_update_ha_state()
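The hvac_mode and fan_mode properties above translate pyvera's mode strings into Home Assistant constants through if-chains. A minimal sketch of the same translation written as lookup tables, assuming the homeassistant package is importable; the VERA_TO_HA_* names and helper functions are illustrative and not part of the integration:

from homeassistant.components.climate.const import (
    FAN_AUTO,
    FAN_ON,
    HVAC_MODE_COOL,
    HVAC_MODE_HEAT,
    HVAC_MODE_HEAT_COOL,
    HVAC_MODE_OFF,
)

# Vera reports its operating mode as a plain string; anything unrecognised
# falls back to "off", matching the fall-through in VeraThermostat.hvac_mode.
VERA_TO_HA_HVAC = {
    "HeatOn": HVAC_MODE_HEAT,
    "CoolOn": HVAC_MODE_COOL,
    "AutoChangeOver": HVAC_MODE_HEAT_COOL,
}

# "ContinuousOn" is the only Vera fan mode mapped to FAN_ON; everything else
# falls back to FAN_AUTO, as in VeraThermostat.fan_mode.
VERA_TO_HA_FAN = {"ContinuousOn": FAN_ON}


def to_ha_hvac(vera_mode: str) -> str:
    """Translate a Vera HVAC mode string to a Home Assistant mode."""
    return VERA_TO_HA_HVAC.get(vera_mode, HVAC_MODE_OFF)


def to_ha_fan(vera_mode: str) -> str:
    """Translate a Vera fan mode string to a Home Assistant fan mode."""
    return VERA_TO_HA_FAN.get(vera_mode, FAN_AUTO)


assert to_ha_hvac("AutoChangeOver") == HVAC_MODE_HEAT_COOL
assert to_ha_fan("SomethingElse") == FAN_AUTO

A table makes the recognised Vera strings visible at a glance; the if-chains in the integration behave identically.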
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/vera/climate.py
"""This component provides HA sensor support for Ring Door Bell/Chimes.""" from homeassistant.components.sensor import SensorEntity from homeassistant.const import ( DEVICE_CLASS_TIMESTAMP, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, ) from homeassistant.core import callback from homeassistant.helpers.icon import icon_for_battery_level from . import DOMAIN from .entity import RingEntityMixin async def async_setup_entry(hass, config_entry, async_add_entities): """Set up a sensor for a Ring device.""" devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] sensors = [] for device_type in ("chimes", "doorbots", "authorized_doorbots", "stickup_cams"): for sensor_type in SENSOR_TYPES: if device_type not in SENSOR_TYPES[sensor_type][1]: continue for device in devices[device_type]: if device_type == "battery" and device.battery_life is None: continue sensors.append( SENSOR_TYPES[sensor_type][6]( config_entry.entry_id, device, sensor_type ) ) async_add_entities(sensors) class RingSensor(RingEntityMixin, SensorEntity): """A sensor implementation for Ring device.""" def __init__(self, config_entry_id, device, sensor_type): """Initialize a sensor for Ring device.""" super().__init__(config_entry_id, device) self._sensor_type = sensor_type self._extra = None self._icon = f"mdi:{SENSOR_TYPES.get(sensor_type)[3]}" self._kind = SENSOR_TYPES.get(sensor_type)[4] self._name = f"{self._device.name} {SENSOR_TYPES.get(sensor_type)[0]}" self._unique_id = f"{device.id}-{sensor_type}" @property def should_poll(self): """Return False, updates are controlled via the hub.""" return False @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" if self._sensor_type == "volume": return self._device.volume if self._sensor_type == "battery": return self._device.battery_life @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_class(self): """Return sensor device class.""" return SENSOR_TYPES[self._sensor_type][5] @property def icon(self): """Icon to use in the frontend, if any.""" if self._sensor_type == "battery" and self._device.battery_life is not None: return icon_for_battery_level( battery_level=self._device.battery_life, charging=False ) return self._icon @property def unit_of_measurement(self): """Return the units of measurement.""" return SENSOR_TYPES.get(self._sensor_type)[2] class HealthDataRingSensor(RingSensor): """Ring sensor that relies on health data.""" async def async_added_to_hass(self): """Register callbacks.""" await super().async_added_to_hass() await self.ring_objects["health_data"].async_track_device( self._device, self._health_update_callback ) async def async_will_remove_from_hass(self): """Disconnect callbacks.""" await super().async_will_remove_from_hass() self.ring_objects["health_data"].async_untrack_device( self._device, self._health_update_callback ) @callback def _health_update_callback(self, _health_data): """Call update method.""" self.async_write_ha_state() @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # These sensors are data hungry and not useful. Disable by default. 
return False @property def state(self): """Return the state of the sensor.""" if self._sensor_type == "wifi_signal_category": return self._device.wifi_signal_category if self._sensor_type == "wifi_signal_strength": return self._device.wifi_signal_strength class HistoryRingSensor(RingSensor): """Ring sensor that relies on history data.""" _latest_event = None async def async_added_to_hass(self): """Register callbacks.""" await super().async_added_to_hass() await self.ring_objects["history_data"].async_track_device( self._device, self._history_update_callback ) async def async_will_remove_from_hass(self): """Disconnect callbacks.""" await super().async_will_remove_from_hass() self.ring_objects["history_data"].async_untrack_device( self._device, self._history_update_callback ) @callback def _history_update_callback(self, history_data): """Call update method.""" if not history_data: return found = None if self._kind is None: found = history_data[0] else: for entry in history_data: if entry["kind"] == self._kind: found = entry break if not found: return self._latest_event = found self.async_write_ha_state() @property def state(self): """Return the state of the sensor.""" if self._latest_event is None: return None return self._latest_event["created_at"].isoformat() @property def extra_state_attributes(self): """Return the state attributes.""" attrs = super().extra_state_attributes if self._latest_event: attrs["created_at"] = self._latest_event["created_at"] attrs["answered"] = self._latest_event["answered"] attrs["recording_status"] = self._latest_event["recording"]["status"] attrs["category"] = self._latest_event["kind"] return attrs # Sensor types: Name, category, units, icon, kind, device_class, class SENSOR_TYPES = { "battery": [ "Battery", ["doorbots", "authorized_doorbots", "stickup_cams"], PERCENTAGE, None, None, "battery", RingSensor, ], "last_activity": [ "Last Activity", ["doorbots", "authorized_doorbots", "stickup_cams"], None, "history", None, DEVICE_CLASS_TIMESTAMP, HistoryRingSensor, ], "last_ding": [ "Last Ding", ["doorbots", "authorized_doorbots"], None, "history", "ding", DEVICE_CLASS_TIMESTAMP, HistoryRingSensor, ], "last_motion": [ "Last Motion", ["doorbots", "authorized_doorbots", "stickup_cams"], None, "history", "motion", DEVICE_CLASS_TIMESTAMP, HistoryRingSensor, ], "volume": [ "Volume", ["chimes", "doorbots", "authorized_doorbots", "stickup_cams"], None, "bell-ring", None, None, RingSensor, ], "wifi_signal_category": [ "WiFi Signal Category", ["chimes", "doorbots", "authorized_doorbots", "stickup_cams"], None, "wifi", None, None, HealthDataRingSensor, ], "wifi_signal_strength": [ "WiFi Signal Strength", ["chimes", "doorbots", "authorized_doorbots", "stickup_cams"], SIGNAL_STRENGTH_DECIBELS_MILLIWATT, "wifi", None, "signal_strength", HealthDataRingSensor, ], }
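SENSOR_TYPES in the Ring sensor module above is read by positional index ([0] name, [2] unit, [6] entity class, ...), which the trailing comment only hints at. A minimal sketch that names those positions with a NamedTuple; RingSensorSpec is illustrative and not part of the integration:

from __future__ import annotations

from typing import NamedTuple


class RingSensorSpec(NamedTuple):
    """Named view of one positional SENSOR_TYPES entry (illustrative only)."""

    name: str                  # [0] suffix appended to the device name
    categories: list[str]      # [1] device categories the sensor applies to
    unit: str | None           # [2] unit of measurement
    icon: str | None           # [3] mdi icon suffix used for self._icon
    kind: str | None           # [4] history "kind" filter, e.g. "ding"
    device_class: str | None   # [5] sensor device class
    entity_class: type         # [6] entity class instantiated during setup


# The "last_ding" entry from SENSOR_TYPES, unpacked by field name.
last_ding = RingSensorSpec(
    "Last Ding",
    ["doorbots", "authorized_doorbots"],
    None,
    "history",
    "ding",
    "timestamp",  # DEVICE_CLASS_TIMESTAMP in the real module
    object,       # stands in for HistoryRingSensor in this standalone sketch
)
assert last_ding.kind == "ding" and last_ding.entity_class is object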
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/ring/sensor.py
"""Constants for the Huisbaasje integration.""" from huisbaasje.const import ( SOURCE_TYPE_ELECTRICITY, SOURCE_TYPE_ELECTRICITY_IN, SOURCE_TYPE_ELECTRICITY_IN_LOW, SOURCE_TYPE_ELECTRICITY_OUT, SOURCE_TYPE_ELECTRICITY_OUT_LOW, SOURCE_TYPE_GAS, ) from homeassistant.const import ( DEVICE_CLASS_ENERGY, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, TIME_HOURS, VOLUME_CUBIC_METERS, ) DATA_COORDINATOR = "coordinator" DOMAIN = "huisbaasje" FLOW_CUBIC_METERS_PER_HOUR = f"{VOLUME_CUBIC_METERS}/{TIME_HOURS}" """Interval in seconds between polls to huisbaasje.""" POLLING_INTERVAL = 20 """Timeout for fetching sensor data""" FETCH_TIMEOUT = 10 SENSOR_TYPE_RATE = "rate" SENSOR_TYPE_THIS_DAY = "thisDay" SENSOR_TYPE_THIS_WEEK = "thisWeek" SENSOR_TYPE_THIS_MONTH = "thisMonth" SENSOR_TYPE_THIS_YEAR = "thisYear" SOURCE_TYPES = [ SOURCE_TYPE_ELECTRICITY, SOURCE_TYPE_ELECTRICITY_IN, SOURCE_TYPE_ELECTRICITY_IN_LOW, SOURCE_TYPE_ELECTRICITY_OUT, SOURCE_TYPE_ELECTRICITY_OUT_LOW, SOURCE_TYPE_GAS, ] SENSORS_INFO = [ { "name": "Huisbaasje Current Power", "device_class": DEVICE_CLASS_POWER, "source_type": SOURCE_TYPE_ELECTRICITY, }, { "name": "Huisbaasje Current Power In", "device_class": DEVICE_CLASS_POWER, "source_type": SOURCE_TYPE_ELECTRICITY_IN, }, { "name": "Huisbaasje Current Power In Low", "device_class": DEVICE_CLASS_POWER, "source_type": SOURCE_TYPE_ELECTRICITY_IN_LOW, }, { "name": "Huisbaasje Current Power Out", "device_class": DEVICE_CLASS_POWER, "source_type": SOURCE_TYPE_ELECTRICITY_OUT, }, { "name": "Huisbaasje Current Power Out Low", "device_class": DEVICE_CLASS_POWER, "source_type": SOURCE_TYPE_ELECTRICITY_OUT_LOW, }, { "name": "Huisbaasje Energy Today", "device_class": DEVICE_CLASS_ENERGY, "unit_of_measurement": ENERGY_KILO_WATT_HOUR, "source_type": SOURCE_TYPE_ELECTRICITY, "sensor_type": SENSOR_TYPE_THIS_DAY, "precision": 1, }, { "name": "Huisbaasje Energy This Week", "device_class": DEVICE_CLASS_ENERGY, "unit_of_measurement": ENERGY_KILO_WATT_HOUR, "source_type": SOURCE_TYPE_ELECTRICITY, "sensor_type": SENSOR_TYPE_THIS_WEEK, "precision": 1, }, { "name": "Huisbaasje Energy This Month", "device_class": DEVICE_CLASS_ENERGY, "unit_of_measurement": ENERGY_KILO_WATT_HOUR, "source_type": SOURCE_TYPE_ELECTRICITY, "sensor_type": SENSOR_TYPE_THIS_MONTH, "precision": 1, }, { "name": "Huisbaasje Energy This Year", "device_class": DEVICE_CLASS_ENERGY, "unit_of_measurement": ENERGY_KILO_WATT_HOUR, "source_type": SOURCE_TYPE_ELECTRICITY, "sensor_type": SENSOR_TYPE_THIS_YEAR, "precision": 1, }, { "name": "Huisbaasje Current Gas", "unit_of_measurement": FLOW_CUBIC_METERS_PER_HOUR, "source_type": SOURCE_TYPE_GAS, "icon": "mdi:fire", "precision": 1, }, { "name": "Huisbaasje Gas Today", "unit_of_measurement": VOLUME_CUBIC_METERS, "source_type": SOURCE_TYPE_GAS, "sensor_type": SENSOR_TYPE_THIS_DAY, "icon": "mdi:counter", "precision": 1, }, { "name": "Huisbaasje Gas This Week", "unit_of_measurement": VOLUME_CUBIC_METERS, "source_type": SOURCE_TYPE_GAS, "sensor_type": SENSOR_TYPE_THIS_WEEK, "icon": "mdi:counter", "precision": 1, }, { "name": "Huisbaasje Gas This Month", "unit_of_measurement": VOLUME_CUBIC_METERS, "source_type": SOURCE_TYPE_GAS, "sensor_type": SENSOR_TYPE_THIS_MONTH, "icon": "mdi:counter", "precision": 1, }, { "name": "Huisbaasje Gas This Year", "unit_of_measurement": VOLUME_CUBIC_METERS, "source_type": SOURCE_TYPE_GAS, "sensor_type": SENSOR_TYPE_THIS_YEAR, "icon": "mdi:counter", "precision": 1, }, ]
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/huisbaasje/const.py
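Several SENSORS_INFO entries in the Huisbaasje constants above omit optional keys (unit_of_measurement, icon, sensor_type, precision), so whatever consumes them has to supply fallbacks. A minimal sketch of that normalisation step; the helper name and the default values are illustrative and not taken from the integration's sensor platform, which is not shown here:

# Hypothetical helper: fill in the optional keys a SENSORS_INFO entry may omit.
SENSOR_TYPE_RATE = "rate"  # mirrors the constant defined in const.py above


def normalise_sensor_info(info: dict) -> dict:
    """Return a SENSORS_INFO-style entry with every optional key filled in."""
    return {
        "name": info["name"],
        "source_type": info["source_type"],
        "device_class": info.get("device_class"),
        "sensor_type": info.get("sensor_type", SENSOR_TYPE_RATE),
        "unit_of_measurement": info.get("unit_of_measurement", "W"),
        "icon": info.get("icon", "mdi:flash"),
        "precision": info.get("precision", 0),
    }


print(
    normalise_sensor_info(
        {
            "name": "Huisbaasje Current Power",
            "device_class": "power",
            "source_type": "electricity",
        }
    )
)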
"""Classes to help gather user submissions.""" from __future__ import annotations import abc import asyncio from collections.abc import Mapping from types import MappingProxyType from typing import Any, TypedDict import uuid import voluptuous as vol from .core import HomeAssistant, callback from .exceptions import HomeAssistantError RESULT_TYPE_FORM = "form" RESULT_TYPE_CREATE_ENTRY = "create_entry" RESULT_TYPE_ABORT = "abort" RESULT_TYPE_EXTERNAL_STEP = "external" RESULT_TYPE_EXTERNAL_STEP_DONE = "external_done" RESULT_TYPE_SHOW_PROGRESS = "progress" RESULT_TYPE_SHOW_PROGRESS_DONE = "progress_done" # Event that is fired when a flow is progressed via external or progress source. EVENT_DATA_ENTRY_FLOW_PROGRESSED = "data_entry_flow_progressed" class FlowError(HomeAssistantError): """Error while configuring an account.""" class UnknownHandler(FlowError): """Unknown handler specified.""" class UnknownFlow(FlowError): """Unknown flow specified.""" class UnknownStep(FlowError): """Unknown step specified.""" class AbortFlow(FlowError): """Exception to indicate a flow needs to be aborted.""" def __init__( self, reason: str, description_placeholders: dict | None = None ) -> None: """Initialize an abort flow exception.""" super().__init__(f"Flow aborted: {reason}") self.reason = reason self.description_placeholders = description_placeholders class FlowResult(TypedDict, total=False): """Typed result dict.""" version: int type: str flow_id: str handler: str title: str data: Mapping[str, Any] step_id: str data_schema: vol.Schema extra: str required: bool errors: dict[str, str] | None description: str | None description_placeholders: dict[str, Any] | None progress_action: str url: str reason: str context: dict[str, Any] result: Any last_step: bool | None options: Mapping[str, Any] class FlowManager(abc.ABC): """Manage all the flows that are in progress.""" def __init__( self, hass: HomeAssistant, ) -> None: """Initialize the flow manager.""" self.hass = hass self._initializing: dict[str, list[asyncio.Future]] = {} self._initialize_tasks: dict[str, list[asyncio.Task]] = {} self._progress: dict[str, Any] = {} async def async_wait_init_flow_finish(self, handler: str) -> None: """Wait till all flows in progress are initialized.""" current = self._initializing.get(handler) if not current: return await asyncio.wait(current) @abc.abstractmethod async def async_create_flow( self, handler_key: Any, *, context: dict[str, Any] | None = None, data: dict[str, Any] | None = None, ) -> FlowHandler: """Create a flow for specified handler. Handler key is the domain of the component that we want to set up. 
""" @abc.abstractmethod async def async_finish_flow( self, flow: FlowHandler, result: FlowResult ) -> FlowResult: """Finish a config flow and add an entry.""" async def async_post_init(self, flow: FlowHandler, result: FlowResult) -> None: """Entry has finished executing its first step asynchronously.""" @callback def async_progress(self, include_uninitialized: bool = False) -> list[FlowResult]: """Return the flows in progress.""" return [ { "flow_id": flow.flow_id, "handler": flow.handler, "context": flow.context, "step_id": flow.cur_step["step_id"] if flow.cur_step else None, } for flow in self._progress.values() if include_uninitialized or flow.cur_step is not None ] async def async_init( self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None ) -> FlowResult: """Start a configuration flow.""" if context is None: context = {} init_done: asyncio.Future = asyncio.Future() self._initializing.setdefault(handler, []).append(init_done) task = asyncio.create_task(self._async_init(init_done, handler, context, data)) self._initialize_tasks.setdefault(handler, []).append(task) try: flow, result = await task finally: self._initialize_tasks[handler].remove(task) self._initializing[handler].remove(init_done) if result["type"] != RESULT_TYPE_ABORT: await self.async_post_init(flow, result) return result async def _async_init( self, init_done: asyncio.Future, handler: str, context: dict, data: Any, ) -> tuple[FlowHandler, FlowResult]: """Run the init in a task to allow it to be canceled at shutdown.""" flow = await self.async_create_flow(handler, context=context, data=data) if not flow: raise UnknownFlow("Flow was not created") flow.hass = self.hass flow.handler = handler flow.flow_id = uuid.uuid4().hex flow.context = context self._progress[flow.flow_id] = flow result = await self._async_handle_step(flow, flow.init_step, data, init_done) return flow, result async def async_shutdown(self) -> None: """Cancel any initializing flows.""" for task_list in self._initialize_tasks.values(): for task in task_list: task.cancel() async def async_configure( self, flow_id: str, user_input: dict | None = None ) -> FlowResult: """Continue a configuration flow.""" flow = self._progress.get(flow_id) if flow is None: raise UnknownFlow cur_step = flow.cur_step if cur_step.get("data_schema") is not None and user_input is not None: user_input = cur_step["data_schema"](user_input) result = await self._async_handle_step(flow, cur_step["step_id"], user_input) if cur_step["type"] in (RESULT_TYPE_EXTERNAL_STEP, RESULT_TYPE_SHOW_PROGRESS): if cur_step["type"] == RESULT_TYPE_EXTERNAL_STEP and result["type"] not in ( RESULT_TYPE_EXTERNAL_STEP, RESULT_TYPE_EXTERNAL_STEP_DONE, ): raise ValueError( "External step can only transition to " "external step or external step done." ) if cur_step["type"] == RESULT_TYPE_SHOW_PROGRESS and result["type"] not in ( RESULT_TYPE_SHOW_PROGRESS, RESULT_TYPE_SHOW_PROGRESS_DONE, ): raise ValueError( "Show progress can only transition to show progress or show progress done." ) # If the result has changed from last result, fire event to update # the frontend. if ( cur_step["step_id"] != result.get("step_id") or result["type"] == RESULT_TYPE_SHOW_PROGRESS ): # Tell frontend to reload the flow state. 
self.hass.bus.async_fire( EVENT_DATA_ENTRY_FLOW_PROGRESSED, {"handler": flow.handler, "flow_id": flow_id, "refresh": True}, ) return result @callback def async_abort(self, flow_id: str) -> None: """Abort a flow.""" if self._progress.pop(flow_id, None) is None: raise UnknownFlow async def _async_handle_step( self, flow: Any, step_id: str, user_input: dict | None, step_done: asyncio.Future | None = None, ) -> FlowResult: """Handle a step of a flow.""" method = f"async_step_{step_id}" if not hasattr(flow, method): self._progress.pop(flow.flow_id) if step_done: step_done.set_result(None) raise UnknownStep( f"Handler {flow.__class__.__name__} doesn't support step {step_id}" ) try: result: FlowResult = await getattr(flow, method)(user_input) except AbortFlow as err: result = _create_abort_data( flow.flow_id, flow.handler, err.reason, err.description_placeholders ) # Mark the step as done. # We do this before calling async_finish_flow because config entries will hit a # circular dependency where async_finish_flow sets up new entry, which needs the # integration to be set up, which is waiting for init to be done. if step_done: step_done.set_result(None) if result["type"] not in ( RESULT_TYPE_FORM, RESULT_TYPE_EXTERNAL_STEP, RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_ABORT, RESULT_TYPE_EXTERNAL_STEP_DONE, RESULT_TYPE_SHOW_PROGRESS, RESULT_TYPE_SHOW_PROGRESS_DONE, ): raise ValueError(f"Handler returned incorrect type: {result['type']}") if result["type"] in ( RESULT_TYPE_FORM, RESULT_TYPE_EXTERNAL_STEP, RESULT_TYPE_EXTERNAL_STEP_DONE, RESULT_TYPE_SHOW_PROGRESS, RESULT_TYPE_SHOW_PROGRESS_DONE, ): flow.cur_step = result return result # We pass a copy of the result because we're mutating our version result = await self.async_finish_flow(flow, result.copy()) # _async_finish_flow may change result type, check it again if result["type"] == RESULT_TYPE_FORM: flow.cur_step = result return result # Abort and Success results both finish the flow self._progress.pop(flow.flow_id) return result class FlowHandler: """Handle the configuration flow of a component.""" # Set by flow manager cur_step: dict[str, str] | None = None # While not purely typed, it makes typehinting more useful for us # and removes the need for constant None checks or asserts. flow_id: str = None # type: ignore hass: HomeAssistant = None # type: ignore handler: str = None # type: ignore # Ensure the attribute has a subscriptable, but immutable, default value. 
context: dict[str, Any] = MappingProxyType({}) # type: ignore # Set by _async_create_flow callback init_step = "init" # Set by developer VERSION = 1 @property def source(self) -> str | None: """Source that initialized the flow.""" if not hasattr(self, "context"): return None return self.context.get("source", None) @property def show_advanced_options(self) -> bool: """If we should show advanced options.""" if not hasattr(self, "context"): return False return self.context.get("show_advanced_options", False) @callback def async_show_form( self, *, step_id: str, data_schema: vol.Schema = None, errors: dict[str, str] | None = None, description_placeholders: dict[str, Any] | None = None, last_step: bool | None = None, ) -> FlowResult: """Return the definition of a form to gather user input.""" return { "type": RESULT_TYPE_FORM, "flow_id": self.flow_id, "handler": self.handler, "step_id": step_id, "data_schema": data_schema, "errors": errors, "description_placeholders": description_placeholders, "last_step": last_step, # Display next or submit button in frontend } @callback def async_create_entry( self, *, title: str, data: Mapping[str, Any], description: str | None = None, description_placeholders: dict | None = None, ) -> FlowResult: """Finish config flow and create a config entry.""" return { "version": self.VERSION, "type": RESULT_TYPE_CREATE_ENTRY, "flow_id": self.flow_id, "handler": self.handler, "title": title, "data": data, "description": description, "description_placeholders": description_placeholders, } @callback def async_abort( self, *, reason: str, description_placeholders: dict | None = None ) -> FlowResult: """Abort the config flow.""" return _create_abort_data( self.flow_id, self.handler, reason, description_placeholders ) @callback def async_external_step( self, *, step_id: str, url: str, description_placeholders: dict | None = None ) -> FlowResult: """Return the definition of an external step for the user to take.""" return { "type": RESULT_TYPE_EXTERNAL_STEP, "flow_id": self.flow_id, "handler": self.handler, "step_id": step_id, "url": url, "description_placeholders": description_placeholders, } @callback def async_external_step_done(self, *, next_step_id: str) -> FlowResult: """Return the definition of an external step for the user to take.""" return { "type": RESULT_TYPE_EXTERNAL_STEP_DONE, "flow_id": self.flow_id, "handler": self.handler, "step_id": next_step_id, } @callback def async_show_progress( self, *, step_id: str, progress_action: str, description_placeholders: dict | None = None, ) -> FlowResult: """Show a progress message to the user, without user input allowed.""" return { "type": RESULT_TYPE_SHOW_PROGRESS, "flow_id": self.flow_id, "handler": self.handler, "step_id": step_id, "progress_action": progress_action, "description_placeholders": description_placeholders, } @callback def async_show_progress_done(self, *, next_step_id: str) -> FlowResult: """Mark the progress done.""" return { "type": RESULT_TYPE_SHOW_PROGRESS_DONE, "flow_id": self.flow_id, "handler": self.handler, "step_id": next_step_id, } @callback def _create_abort_data( flow_id: str, handler: str, reason: str, description_placeholders: dict | None = None, ) -> FlowResult: """Return the definition of an external step for the user to take.""" return { "type": RESULT_TYPE_ABORT, "flow_id": flow_id, "handler": handler, "reason": reason, "description_placeholders": description_placeholders, }
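The FlowHandler helpers above (async_show_form, async_create_entry, async_abort) only build result dictionaries, so they can be exercised without a FlowManager or a running Home Assistant instance. A minimal sketch, assuming the homeassistant package is importable; DemoFlow is illustrative and not part of the module:

import asyncio

import voluptuous as vol

from homeassistant.data_entry_flow import FlowHandler


class DemoFlow(FlowHandler):
    """Illustrative flow used only to show the result dictionaries."""

    async def async_step_user(self, user_input=None):
        if user_input is None:
            # Produces a RESULT_TYPE_FORM dict for the frontend to render.
            return self.async_show_form(
                step_id="user",
                data_schema=vol.Schema({vol.Required("host"): str}),
            )
        # Produces a RESULT_TYPE_CREATE_ENTRY dict for async_finish_flow.
        return self.async_create_entry(title=user_input["host"], data=user_input)


flow = DemoFlow()
flow.flow_id = "demo-flow-id"  # normally assigned by FlowManager._async_init
flow.handler = "demo"

form = asyncio.run(flow.async_step_user(None))
assert form["type"] == "form" and form["step_id"] == "user"

entry = asyncio.run(flow.async_step_user({"host": "172.2.3.4"}))
assert entry["type"] == "create_entry" and entry["title"] == "172.2.3.4"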
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/data_entry_flow.py
"""Support for GPSLogger.""" from aiohttp import web import voluptuous as vol from homeassistant.components.device_tracker import ( ATTR_BATTERY, DOMAIN as DEVICE_TRACKER, ) from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, CONF_WEBHOOK_ID, HTTP_OK, HTTP_UNPROCESSABLE_ENTITY, ) from homeassistant.helpers import config_entry_flow import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_ACCURACY, ATTR_ACTIVITY, ATTR_ALTITUDE, ATTR_DEVICE, ATTR_DIRECTION, ATTR_PROVIDER, ATTR_SPEED, DOMAIN, ) PLATFORMS = [DEVICE_TRACKER] TRACKER_UPDATE = f"{DOMAIN}_tracker_update" DEFAULT_ACCURACY = 200 DEFAULT_BATTERY = -1 def _id(value: str) -> str: """Coerce id by removing '-'.""" return value.replace("-", "") WEBHOOK_SCHEMA = vol.Schema( { vol.Required(ATTR_DEVICE): _id, vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Optional(ATTR_ACCURACY, default=DEFAULT_ACCURACY): vol.Coerce(float), vol.Optional(ATTR_ACTIVITY): cv.string, vol.Optional(ATTR_ALTITUDE): vol.Coerce(float), vol.Optional(ATTR_BATTERY, default=DEFAULT_BATTERY): vol.Coerce(float), vol.Optional(ATTR_DIRECTION): vol.Coerce(float), vol.Optional(ATTR_PROVIDER): cv.string, vol.Optional(ATTR_SPEED): vol.Coerce(float), } ) async def async_setup(hass, hass_config): """Set up the GPSLogger component.""" hass.data[DOMAIN] = {"devices": set(), "unsub_device_tracker": {}} return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook with GPSLogger request.""" try: data = WEBHOOK_SCHEMA(dict(await request.post())) except vol.MultipleInvalid as error: return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY) attrs = { ATTR_SPEED: data.get(ATTR_SPEED), ATTR_DIRECTION: data.get(ATTR_DIRECTION), ATTR_ALTITUDE: data.get(ATTR_ALTITUDE), ATTR_PROVIDER: data.get(ATTR_PROVIDER), ATTR_ACTIVITY: data.get(ATTR_ACTIVITY), } device = data[ATTR_DEVICE] async_dispatcher_send( hass, TRACKER_UPDATE, device, (data[ATTR_LATITUDE], data[ATTR_LONGITUDE]), data[ATTR_BATTERY], data[ATTR_ACCURACY], attrs, ) return web.Response(text=f"Setting location for {device}", status=HTTP_OK) async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, "GPSLogger", entry.data[CONF_WEBHOOK_ID], handle_webhook ) hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async_remove_entry = config_entry_flow.webhook_async_remove_entry
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/gpslogger/__init__.py
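handle_webhook in the GPSLogger component above relies on WEBHOOK_SCHEMA both to coerce the form-encoded strings to floats and to inject the accuracy/battery defaults. A minimal standalone sketch of that validation step, using a simplified stand-in schema with literal keys and plain validators instead of the component's ATTR_* constants and cv helpers:

import voluptuous as vol

# Simplified stand-in for WEBHOOK_SCHEMA: same coercion/default behaviour,
# but not the component's actual schema.
SKETCH_SCHEMA = vol.Schema(
    {
        vol.Required("device"): lambda value: value.replace("-", ""),
        vol.Required("latitude"): vol.Coerce(float),
        vol.Required("longitude"): vol.Coerce(float),
        vol.Optional("accuracy", default=200): vol.Coerce(float),
        vol.Optional("battery", default=-1): vol.Coerce(float),
    },
    extra=vol.REMOVE_EXTRA,
)

# GPSLogger posts everything as strings; the schema turns them into floats
# and fills in the defaults the dispatcher call depends on.
data = SKETCH_SCHEMA(
    {"device": "phone-1", "latitude": "52.37", "longitude": "4.89", "speed": "3"}
)
assert data == {
    "device": "phone1",
    "latitude": 52.37,
    "longitude": 4.89,
    "accuracy": 200.0,
    "battery": -1.0,
}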
"""Twilio Call platform for notify component.""" import logging import urllib from twilio.base.exceptions import TwilioRestException import voluptuous as vol from homeassistant.components.notify import ( ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.components.twilio import DATA_TWILIO import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_FROM_NUMBER = "from_number" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_FROM_NUMBER): vol.All( cv.string, vol.Match(r"^\+?[1-9]\d{1,14}$") ) } ) def get_service(hass, config, discovery_info=None): """Get the Twilio Call notification service.""" return TwilioCallNotificationService( hass.data[DATA_TWILIO], config[CONF_FROM_NUMBER] ) class TwilioCallNotificationService(BaseNotificationService): """Implement the notification service for the Twilio Call service.""" def __init__(self, twilio_client, from_number): """Initialize the service.""" self.client = twilio_client self.from_number = from_number def send_message(self, message="", **kwargs): """Call to specified target users.""" targets = kwargs.get(ATTR_TARGET) if not targets: _LOGGER.info("At least 1 target is required") return if message.startswith(("http://", "https://")): twimlet_url = message else: twimlet_url = "http://twimlets.com/message?Message=" twimlet_url += urllib.parse.quote(message, safe="") for target in targets: try: self.client.calls.create( to=target, url=twimlet_url, from_=self.from_number ) except TwilioRestException as exc: _LOGGER.error(exc)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/twilio_call/notify.py
"""Support for Coinbase sensors.""" from homeassistant.components.sensor import SensorEntity from homeassistant.const import ATTR_ATTRIBUTION ATTR_NATIVE_BALANCE = "Balance in native currency" CURRENCY_ICONS = { "BTC": "mdi:currency-btc", "ETH": "mdi:currency-eth", "EUR": "mdi:currency-eur", "LTC": "mdi:litecoin", "USD": "mdi:currency-usd", } DEFAULT_COIN_ICON = "mdi:currency-usd-circle" ATTRIBUTION = "Data provided by coinbase.com" DATA_COINBASE = "coinbase_cache" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Coinbase sensors.""" if discovery_info is None: return if "account" in discovery_info: account = discovery_info["account"] sensor = AccountSensor( hass.data[DATA_COINBASE], account["name"], account["balance"]["currency"] ) if "exchange_currency" in discovery_info: sensor = ExchangeRateSensor( hass.data[DATA_COINBASE], discovery_info["exchange_currency"], discovery_info["native_currency"], ) add_entities([sensor], True) class AccountSensor(SensorEntity): """Representation of a Coinbase.com sensor.""" def __init__(self, coinbase_data, name, currency): """Initialize the sensor.""" self._coinbase_data = coinbase_data self._name = f"Coinbase {name}" self._state = None self._unit_of_measurement = currency self._native_balance = None self._native_currency = None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement this sensor expresses itself in.""" return self._unit_of_measurement @property def icon(self): """Return the icon to use in the frontend, if any.""" return CURRENCY_ICONS.get(self._unit_of_measurement, DEFAULT_COIN_ICON) @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._native_currency}", } def update(self): """Get the latest state of the sensor.""" self._coinbase_data.update() for account in self._coinbase_data.accounts: if self._name == f"Coinbase {account['name']}": self._state = account["balance"]["amount"] self._native_balance = account["native_balance"]["amount"] self._native_currency = account["native_balance"]["currency"] class ExchangeRateSensor(SensorEntity): """Representation of a Coinbase.com sensor.""" def __init__(self, coinbase_data, exchange_currency, native_currency): """Initialize the sensor.""" self._coinbase_data = coinbase_data self.currency = exchange_currency self._name = f"{exchange_currency} Exchange Rate" self._state = None self._unit_of_measurement = native_currency @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement this sensor expresses itself in.""" return self._unit_of_measurement @property def icon(self): """Return the icon to use in the frontend, if any.""" return CURRENCY_ICONS.get(self.currency, DEFAULT_COIN_ICON) @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} def update(self): """Get the latest state of the sensor.""" self._coinbase_data.update() rate = self._coinbase_data.exchange_rates.rates[self.currency] self._state = round(1 / float(rate), 2)
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/coinbase/sensor.py
"""Config flow for Mikrotik.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback from .const import ( CONF_ARP_PING, CONF_DETECTION_TIME, CONF_FORCE_DHCP, DEFAULT_API_PORT, DEFAULT_DETECTION_TIME, DEFAULT_NAME, DOMAIN, ) from .errors import CannotConnect, LoginError from .hub import get_api class MikrotikFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Mikrotik config flow.""" VERSION = 1 @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return MikrotikOptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input is not None: for entry in self._async_current_entries(): if entry.data[CONF_HOST] == user_input[CONF_HOST]: return self.async_abort(reason="already_configured") if entry.data[CONF_NAME] == user_input[CONF_NAME]: errors[CONF_NAME] = "name_exists" break try: await self.hass.async_add_executor_job(get_api, self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" except LoginError: errors[CONF_USERNAME] = "invalid_auth" errors[CONF_PASSWORD] = "invalid_auth" if not errors: return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_NAME, default=DEFAULT_NAME): str, vol.Required(CONF_HOST): str, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_API_PORT): int, vol.Optional(CONF_VERIFY_SSL, default=False): bool, } ), errors=errors, ) async def async_step_import(self, import_config): """Import Miktortik from config.""" import_config[CONF_DETECTION_TIME] = import_config[ CONF_DETECTION_TIME ].total_seconds() return await self.async_step_user(user_input=import_config) class MikrotikOptionsFlowHandler(config_entries.OptionsFlow): """Handle Mikrotik options.""" def __init__(self, config_entry): """Initialize Mikrotik options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage the Mikrotik options.""" return await self.async_step_device_tracker() async def async_step_device_tracker(self, user_input=None): """Manage the device tracker options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = { vol.Optional( CONF_FORCE_DHCP, default=self.config_entry.options.get(CONF_FORCE_DHCP, False), ): bool, vol.Optional( CONF_ARP_PING, default=self.config_entry.options.get(CONF_ARP_PING, False), ): bool, vol.Optional( CONF_DETECTION_TIME, default=self.config_entry.options.get( CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME ), ): int, } return self.async_show_form( step_id="device_tracker", data_schema=vol.Schema(options) )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/mikrotik/config_flow.py
"""Support for the PRT Heatmiser themostats using the V3 protocol.""" from __future__ import annotations import logging from heatmiserV3 import connection, heatmiser import voluptuous as vol from homeassistant.components.climate import ( HVAC_MODE_HEAT, HVAC_MODE_OFF, PLATFORM_SCHEMA, ClimateEntity, ) from homeassistant.components.climate.const import SUPPORT_TARGET_TEMPERATURE from homeassistant.const import ( ATTR_TEMPERATURE, CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_THERMOSTATS = "tstats" TSTATS_SCHEMA = vol.Schema( vol.All( cv.ensure_list, [{vol.Required(CONF_ID): cv.positive_int, vol.Required(CONF_NAME): cv.string}], ) ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.string, vol.Optional(CONF_THERMOSTATS, default=[]): TSTATS_SCHEMA, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the heatmiser thermostat.""" heatmiser_v3_thermostat = heatmiser.HeatmiserThermostat host = config[CONF_HOST] port = config[CONF_PORT] thermostats = config[CONF_THERMOSTATS] uh1_hub = connection.HeatmiserUH1(host, port) add_entities( [ HeatmiserV3Thermostat(heatmiser_v3_thermostat, thermostat, uh1_hub) for thermostat in thermostats ], True, ) class HeatmiserV3Thermostat(ClimateEntity): """Representation of a HeatmiserV3 thermostat.""" def __init__(self, therm, device, uh1): """Initialize the thermostat.""" self.therm = therm(device[CONF_ID], "prt", uh1) self.uh1 = uh1 self._name = device[CONF_NAME] self._current_temperature = None self._target_temperature = None self._id = device self.dcb = None self._hvac_mode = HVAC_MODE_HEAT self._temperature_unit = None @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_TARGET_TEMPERATURE @property def name(self): """Return the name of the thermostat, if any.""" return self._name @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return self._temperature_unit @property def hvac_mode(self) -> str: """Return hvac operation ie. heat, cool mode. Need to be one of HVAC_MODE_*. """ return self._hvac_mode @property def hvac_modes(self) -> list[str]: """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES. """ return [HVAC_MODE_HEAT, HVAC_MODE_OFF] @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature def set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) self._target_temperature = int(temperature) self.therm.set_target_temp(self._target_temperature) def update(self): """Get the latest data.""" self.uh1.reopen() if not self.uh1.status: _LOGGER.error("Failed to update device %s", self._name) return self.dcb = self.therm.read_dcb() self._temperature_unit = ( TEMP_CELSIUS if (self.therm.get_temperature_format() == "C") else TEMP_FAHRENHEIT ) self._current_temperature = int(self.therm.get_floor_temp()) self._target_temperature = int(self.therm.get_target_temp()) self._hvac_mode = ( HVAC_MODE_OFF if (int(self.therm.get_current_state()) == 0) else HVAC_MODE_HEAT )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/heatmiser/climate.py
"""Support for Ecovacs Deebot vacuums.""" import logging import random import string from sucks import EcoVacsAPI, VacBot import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "ecovacs" CONF_COUNTRY = "country" CONF_CONTINENT = "continent" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_COUNTRY): vol.All(vol.Lower, cv.string), vol.Required(CONF_CONTINENT): vol.All(vol.Lower, cv.string), } ) }, extra=vol.ALLOW_EXTRA, ) ECOVACS_DEVICES = "ecovacs_devices" # Generate a random device ID on each bootup ECOVACS_API_DEVICEID = "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) def setup(hass, config): """Set up the Ecovacs component.""" _LOGGER.debug("Creating new Ecovacs component") hass.data[ECOVACS_DEVICES] = [] ecovacs_api = EcoVacsAPI( ECOVACS_API_DEVICEID, config[DOMAIN].get(CONF_USERNAME), EcoVacsAPI.md5(config[DOMAIN].get(CONF_PASSWORD)), config[DOMAIN].get(CONF_COUNTRY), config[DOMAIN].get(CONF_CONTINENT), ) devices = ecovacs_api.devices() _LOGGER.debug("Ecobot devices: %s", devices) for device in devices: _LOGGER.info( "Discovered Ecovacs device on account: %s with nickname %s", device["did"], device["nick"], ) vacbot = VacBot( ecovacs_api.uid, ecovacs_api.REALM, ecovacs_api.resource, ecovacs_api.user_access_token, device, config[DOMAIN].get(CONF_CONTINENT).lower(), monitor=True, ) hass.data[ECOVACS_DEVICES].append(vacbot) def stop(event: object) -> None: """Shut down open connections to Ecovacs XMPP server.""" for device in hass.data[ECOVACS_DEVICES]: _LOGGER.info( "Shutting down connection to Ecovacs device %s", device.vacuum["did"] ) device.disconnect() # Listen for HA stop to disconnect. hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop) if hass.data[ECOVACS_DEVICES]: _LOGGER.debug("Starting vacuum components") discovery.load_platform(hass, "vacuum", DOMAIN, {}, config) return True
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/ecovacs/__init__.py
"""The Ruckus Unleashed integration.""" from pyruckus import Ruckus from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from .const import ( API_AP, API_DEVICE_NAME, API_ID, API_MAC, API_MODEL, API_SYSTEM_OVERVIEW, API_VERSION, COORDINATOR, DOMAIN, MANUFACTURER, PLATFORMS, UNDO_UPDATE_LISTENERS, ) from .coordinator import RuckusUnleashedDataUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Ruckus Unleashed from a config entry.""" try: ruckus = await hass.async_add_executor_job( Ruckus, entry.data[CONF_HOST], entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], ) except ConnectionError as error: raise ConfigEntryNotReady from error coordinator = RuckusUnleashedDataUpdateCoordinator(hass, ruckus=ruckus) await coordinator.async_config_entry_first_refresh() system_info = await hass.async_add_executor_job(ruckus.system_info) registry = await device_registry.async_get_registry(hass) ap_info = await hass.async_add_executor_job(ruckus.ap_info) for device in ap_info[API_AP][API_ID].values(): registry.async_get_or_create( config_entry_id=entry.entry_id, connections={(CONNECTION_NETWORK_MAC, device[API_MAC])}, identifiers={(CONNECTION_NETWORK_MAC, device[API_MAC])}, manufacturer=MANUFACTURER, name=device[API_DEVICE_NAME], model=device[API_MODEL], sw_version=system_info[API_SYSTEM_OVERVIEW][API_VERSION], ) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = { COORDINATOR: coordinator, UNDO_UPDATE_LISTENERS: [], } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: for listener in hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENERS]: listener() hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/ruckus_unleashed/__init__.py
"""Support for Neato Connected Vacuums.""" from datetime import timedelta import logging from pybotvac.exceptions import NeatoRobotException import voluptuous as vol from homeassistant.components.vacuum import ( ATTR_STATUS, STATE_CLEANING, STATE_DOCKED, STATE_ERROR, STATE_IDLE, STATE_PAUSED, STATE_RETURNING, SUPPORT_BATTERY, SUPPORT_CLEAN_SPOT, SUPPORT_LOCATE, SUPPORT_MAP, SUPPORT_PAUSE, SUPPORT_RETURN_HOME, SUPPORT_START, SUPPORT_STATE, SUPPORT_STOP, StateVacuumEntity, ) from homeassistant.const import ATTR_MODE from homeassistant.helpers import config_validation as cv, entity_platform from .const import ( ACTION, ALERTS, ERRORS, MODE, NEATO_DOMAIN, NEATO_LOGIN, NEATO_MAP_DATA, NEATO_PERSISTENT_MAPS, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES) SUPPORT_NEATO = ( SUPPORT_BATTERY | SUPPORT_PAUSE | SUPPORT_RETURN_HOME | SUPPORT_STOP | SUPPORT_START | SUPPORT_CLEAN_SPOT | SUPPORT_STATE | SUPPORT_MAP | SUPPORT_LOCATE ) ATTR_CLEAN_START = "clean_start" ATTR_CLEAN_STOP = "clean_stop" ATTR_CLEAN_AREA = "clean_area" ATTR_CLEAN_BATTERY_START = "battery_level_at_clean_start" ATTR_CLEAN_BATTERY_END = "battery_level_at_clean_end" ATTR_CLEAN_SUSP_COUNT = "clean_suspension_count" ATTR_CLEAN_SUSP_TIME = "clean_suspension_time" ATTR_CLEAN_PAUSE_TIME = "clean_pause_time" ATTR_CLEAN_ERROR_TIME = "clean_error_time" ATTR_LAUNCHED_FROM = "launched_from" ATTR_NAVIGATION = "navigation" ATTR_CATEGORY = "category" ATTR_ZONE = "zone" async def async_setup_entry(hass, entry, async_add_entities): """Set up Neato vacuum with config entry.""" dev = [] neato = hass.data.get(NEATO_LOGIN) mapdata = hass.data.get(NEATO_MAP_DATA) persistent_maps = hass.data.get(NEATO_PERSISTENT_MAPS) for robot in hass.data[NEATO_ROBOTS]: dev.append(NeatoConnectedVacuum(neato, robot, mapdata, persistent_maps)) if not dev: return _LOGGER.debug("Adding vacuums %s", dev) async_add_entities(dev, True) platform = entity_platform.async_get_current_platform() assert platform is not None platform.async_register_entity_service( "custom_cleaning", { vol.Optional(ATTR_MODE, default=2): cv.positive_int, vol.Optional(ATTR_NAVIGATION, default=1): cv.positive_int, vol.Optional(ATTR_CATEGORY, default=4): cv.positive_int, vol.Optional(ATTR_ZONE): cv.string, }, "neato_custom_cleaning", ) class NeatoConnectedVacuum(StateVacuumEntity): """Representation of a Neato Connected Vacuum.""" def __init__(self, neato, robot, mapdata, persistent_maps): """Initialize the Neato Connected Vacuum.""" self.robot = robot self._available = neato is not None self._mapdata = mapdata self._name = f"{self.robot.name}" self._robot_has_map = self.robot.has_persistent_maps self._robot_maps = persistent_maps self._robot_serial = self.robot.serial self._status_state = None self._clean_state = None self._state = None self._clean_time_start = None self._clean_time_stop = None self._clean_area = None self._clean_battery_start = None self._clean_battery_end = None self._clean_susp_charge_count = None self._clean_susp_time = None self._clean_pause_time = None self._clean_error_time = None self._launched_from = None self._battery_level = None self._robot_boundaries = [] self._robot_stats = None def update(self): """Update the states of Neato Vacuums.""" _LOGGER.debug("Running Neato Vacuums update for '%s'", self.entity_id) try: if self._robot_stats is None: self._robot_stats = self.robot.get_general_info().json().get("data") except NeatoRobotException: _LOGGER.warning("Couldn't fetch robot information 
of %s", self.entity_id) try: self._state = self.robot.state except NeatoRobotException as ex: if self._available: # print only once when available _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) self._state = None self._available = False return self._available = True _LOGGER.debug("self._state=%s", self._state) if "alert" in self._state: robot_alert = ALERTS.get(self._state["alert"]) else: robot_alert = None if self._state["state"] == 1: if self._state["details"]["isCharging"]: self._clean_state = STATE_DOCKED self._status_state = "Charging" elif ( self._state["details"]["isDocked"] and not self._state["details"]["isCharging"] ): self._clean_state = STATE_DOCKED self._status_state = "Docked" else: self._clean_state = STATE_IDLE self._status_state = "Stopped" if robot_alert is not None: self._status_state = robot_alert elif self._state["state"] == 2: if robot_alert is None: self._clean_state = STATE_CLEANING self._status_state = ( f"{MODE.get(self._state['cleaning']['mode'])} " f"{ACTION.get(self._state['action'])}" ) if ( "boundary" in self._state["cleaning"] and "name" in self._state["cleaning"]["boundary"] ): self._status_state += ( f" {self._state['cleaning']['boundary']['name']}" ) else: self._status_state = robot_alert elif self._state["state"] == 3: self._clean_state = STATE_PAUSED self._status_state = "Paused" elif self._state["state"] == 4: self._clean_state = STATE_ERROR self._status_state = ERRORS.get(self._state["error"]) self._battery_level = self._state["details"]["charge"] if not self._mapdata.get(self._robot_serial, {}).get("maps", []): return mapdata = self._mapdata[self._robot_serial]["maps"][0] self._clean_time_start = mapdata["start_at"] self._clean_time_stop = mapdata["end_at"] self._clean_area = mapdata["cleaned_area"] self._clean_susp_charge_count = mapdata["suspended_cleaning_charging_count"] self._clean_susp_time = mapdata["time_in_suspended_cleaning"] self._clean_pause_time = mapdata["time_in_pause"] self._clean_error_time = mapdata["time_in_error"] self._clean_battery_start = mapdata["run_charge_at_start"] self._clean_battery_end = mapdata["run_charge_at_end"] self._launched_from = mapdata["launched_from"] if ( self._robot_has_map and self._state["availableServices"]["maps"] != "basic-1" and self._robot_maps[self._robot_serial] ): allmaps = self._robot_maps[self._robot_serial] _LOGGER.debug( "Found the following maps for '%s': %s", self.entity_id, allmaps ) self._robot_boundaries = [] # Reset boundaries before refreshing boundaries for maps in allmaps: try: robot_boundaries = self.robot.get_map_boundaries(maps["id"]).json() except NeatoRobotException as ex: _LOGGER.error( "Could not fetch map boundaries for '%s': %s", self.entity_id, ex, ) return _LOGGER.debug( "Boundaries for robot '%s' in map '%s': %s", self.entity_id, maps["name"], robot_boundaries, ) if "boundaries" in robot_boundaries["data"]: self._robot_boundaries += robot_boundaries["data"]["boundaries"] _LOGGER.debug( "List of boundaries for '%s': %s", self.entity_id, self._robot_boundaries, ) @property def name(self): """Return the name of the device.""" return self._name @property def supported_features(self): """Flag vacuum cleaner robot features that are supported.""" return SUPPORT_NEATO @property def battery_level(self): """Return the battery level of the vacuum cleaner.""" return self._battery_level @property def available(self): """Return if the robot is available.""" return self._available @property def icon(self): """Return neato specific icon.""" return 
"mdi:robot-vacuum-variant" @property def state(self): """Return the status of the vacuum cleaner.""" return self._clean_state @property def unique_id(self): """Return a unique ID.""" return self._robot_serial @property def extra_state_attributes(self): """Return the state attributes of the vacuum cleaner.""" data = {} if self._status_state is not None: data[ATTR_STATUS] = self._status_state if self._clean_time_start is not None: data[ATTR_CLEAN_START] = self._clean_time_start if self._clean_time_stop is not None: data[ATTR_CLEAN_STOP] = self._clean_time_stop if self._clean_area is not None: data[ATTR_CLEAN_AREA] = self._clean_area if self._clean_susp_charge_count is not None: data[ATTR_CLEAN_SUSP_COUNT] = self._clean_susp_charge_count if self._clean_susp_time is not None: data[ATTR_CLEAN_SUSP_TIME] = self._clean_susp_time if self._clean_pause_time is not None: data[ATTR_CLEAN_PAUSE_TIME] = self._clean_pause_time if self._clean_error_time is not None: data[ATTR_CLEAN_ERROR_TIME] = self._clean_error_time if self._clean_battery_start is not None: data[ATTR_CLEAN_BATTERY_START] = self._clean_battery_start if self._clean_battery_end is not None: data[ATTR_CLEAN_BATTERY_END] = self._clean_battery_end if self._launched_from is not None: data[ATTR_LAUNCHED_FROM] = self._launched_from return data @property def device_info(self): """Device info for neato robot.""" info = {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}, "name": self._name} if self._robot_stats: info["manufacturer"] = self._robot_stats["battery"]["vendor"] info["model"] = self._robot_stats["model"] info["sw_version"] = self._robot_stats["firmware"] return info def start(self): """Start cleaning or resume cleaning.""" try: if self._state["state"] == 1: self.robot.start_cleaning() elif self._state["state"] == 3: self.robot.resume_cleaning() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def pause(self): """Pause the vacuum.""" try: self.robot.pause_cleaning() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def return_to_base(self, **kwargs): """Set the vacuum cleaner to return to the dock.""" try: if self._clean_state == STATE_CLEANING: self.robot.pause_cleaning() self._clean_state = STATE_RETURNING self.robot.send_to_base() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def stop(self, **kwargs): """Stop the vacuum cleaner.""" try: self.robot.stop_cleaning() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def locate(self, **kwargs): """Locate the robot by making it emit a sound.""" try: self.robot.locate() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def clean_spot(self, **kwargs): """Run a spot cleaning starting from the base.""" try: self.robot.start_spot_cleaning() except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex ) def neato_custom_cleaning(self, mode, navigation, category, zone=None): """Zone cleaning service call.""" boundary_id = None if zone is not None: for boundary in self._robot_boundaries: if zone in boundary["name"]: boundary_id = boundary["id"] if boundary_id is None: _LOGGER.error( "Zone '%s' was not found for the robot '%s'", zone, self.entity_id ) return _LOGGER.info("Start cleaning zone '%s' with robot %s", zone, 
self.entity_id) self._clean_state = STATE_CLEANING try: self.robot.start_cleaning(mode, navigation, category, boundary_id) except NeatoRobotException as ex: _LOGGER.error( "Neato vacuum connection error for '%s': %s", self.entity_id, ex )
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/neato/vacuum.py
"""Support for Netgear LTE modems.""" import asyncio from datetime import timedelta import logging import aiohttp import attr import eternalegypt import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( CONF_HOST, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_PASSWORD, CONF_RECIPIENT, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, discovery from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from . import sensor_types _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=10) DISPATCHER_NETGEAR_LTE = "netgear_lte_update" DOMAIN = "netgear_lte" DATA_KEY = "netgear_lte" EVENT_SMS = "netgear_lte_sms" SERVICE_DELETE_SMS = "delete_sms" SERVICE_SET_OPTION = "set_option" SERVICE_CONNECT_LTE = "connect_lte" SERVICE_DISCONNECT_LTE = "disconnect_lte" ATTR_HOST = "host" ATTR_SMS_ID = "sms_id" ATTR_FROM = "from" ATTR_MESSAGE = "message" ATTR_FAILOVER = "failover" ATTR_AUTOCONNECT = "autoconnect" FAILOVER_MODES = ["auto", "wire", "mobile"] AUTOCONNECT_MODES = ["never", "home", "always"] NOTIFY_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME, default=DOMAIN): cv.string, vol.Optional(CONF_RECIPIENT, default=[]): vol.All(cv.ensure_list, [cv.string]), } ) SENSOR_SCHEMA = vol.Schema( { vol.Optional( CONF_MONITORED_CONDITIONS, default=sensor_types.DEFAULT_SENSORS ): vol.All(cv.ensure_list, [vol.In(sensor_types.ALL_SENSORS)]) } ) BINARY_SENSOR_SCHEMA = vol.Schema( { vol.Optional( CONF_MONITORED_CONDITIONS, default=sensor_types.DEFAULT_BINARY_SENSORS ): vol.All(cv.ensure_list, [vol.In(sensor_types.ALL_BINARY_SENSORS)]) } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.ensure_list, [ vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(NOTIFY_DOMAIN, default={}): vol.All( cv.ensure_list, [NOTIFY_SCHEMA] ), vol.Optional(SENSOR_DOMAIN, default={}): SENSOR_SCHEMA, vol.Optional( BINARY_SENSOR_DOMAIN, default={} ): BINARY_SENSOR_SCHEMA, } ) ], ) }, extra=vol.ALLOW_EXTRA, ) DELETE_SMS_SCHEMA = vol.Schema( { vol.Optional(ATTR_HOST): cv.string, vol.Required(ATTR_SMS_ID): vol.All(cv.ensure_list, [cv.positive_int]), } ) SET_OPTION_SCHEMA = vol.Schema( vol.All( cv.has_at_least_one_key(ATTR_FAILOVER, ATTR_AUTOCONNECT), { vol.Optional(ATTR_HOST): cv.string, vol.Optional(ATTR_FAILOVER): vol.In(FAILOVER_MODES), vol.Optional(ATTR_AUTOCONNECT): vol.In(AUTOCONNECT_MODES), }, ) ) CONNECT_LTE_SCHEMA = vol.Schema({vol.Optional(ATTR_HOST): cv.string}) DISCONNECT_LTE_SCHEMA = vol.Schema({vol.Optional(ATTR_HOST): cv.string}) @attr.s class ModemData: """Class for modem state.""" hass = attr.ib() host = attr.ib() modem = attr.ib() data = attr.ib(init=False, default=None) connected = attr.ib(init=False, default=True) async def async_update(self): """Call the API to update the data.""" try: self.data = await self.modem.information() if not self.connected: _LOGGER.warning("Connected to %s", self.host) self.connected = True except eternalegypt.Error: if self.connected: _LOGGER.warning("Lost connection to %s", self.host) self.connected = False 
self.data = None async_dispatcher_send(self.hass, DISPATCHER_NETGEAR_LTE) @attr.s class LTEData: """Shared state.""" websession = attr.ib() modem_data = attr.ib(init=False, factory=dict) def get_modem_data(self, config): """Get modem_data for the host in config.""" if config[CONF_HOST] is not None: return self.modem_data.get(config[CONF_HOST]) if len(self.modem_data) != 1: return None return next(iter(self.modem_data.values())) async def async_setup(hass, config): """Set up Netgear LTE component.""" if DATA_KEY not in hass.data: websession = async_create_clientsession( hass, cookie_jar=aiohttp.CookieJar(unsafe=True) ) hass.data[DATA_KEY] = LTEData(websession) async def service_handler(service): """Apply a service.""" host = service.data.get(ATTR_HOST) conf = {CONF_HOST: host} modem_data = hass.data[DATA_KEY].get_modem_data(conf) if not modem_data: _LOGGER.error("%s: host %s unavailable", service.service, host) return if service.service == SERVICE_DELETE_SMS: for sms_id in service.data[ATTR_SMS_ID]: await modem_data.modem.delete_sms(sms_id) elif service.service == SERVICE_SET_OPTION: failover = service.data.get(ATTR_FAILOVER) if failover: await modem_data.modem.set_failover_mode(failover) autoconnect = service.data.get(ATTR_AUTOCONNECT) if autoconnect: await modem_data.modem.set_autoconnect_mode(autoconnect) elif service.service == SERVICE_CONNECT_LTE: await modem_data.modem.connect_lte() elif service.service == SERVICE_DISCONNECT_LTE: await modem_data.modem.disconnect_lte() service_schemas = { SERVICE_DELETE_SMS: DELETE_SMS_SCHEMA, SERVICE_SET_OPTION: SET_OPTION_SCHEMA, SERVICE_CONNECT_LTE: CONNECT_LTE_SCHEMA, SERVICE_DISCONNECT_LTE: DISCONNECT_LTE_SCHEMA, } for service, schema in service_schemas.items(): hass.services.async_register( DOMAIN, service, service_handler, schema=schema ) netgear_lte_config = config[DOMAIN] # Set up each modem tasks = [_setup_lte(hass, lte_conf) for lte_conf in netgear_lte_config] await asyncio.wait(tasks) # Load platforms for each modem for lte_conf in netgear_lte_config: # Notify for notify_conf in lte_conf[NOTIFY_DOMAIN]: discovery_info = { CONF_HOST: lte_conf[CONF_HOST], CONF_NAME: notify_conf.get(CONF_NAME), NOTIFY_DOMAIN: notify_conf, } hass.async_create_task( discovery.async_load_platform( hass, NOTIFY_DOMAIN, DOMAIN, discovery_info, config ) ) # Sensor sensor_conf = lte_conf.get(SENSOR_DOMAIN) discovery_info = {CONF_HOST: lte_conf[CONF_HOST], SENSOR_DOMAIN: sensor_conf} hass.async_create_task( discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, discovery_info, config ) ) # Binary Sensor binary_sensor_conf = lte_conf.get(BINARY_SENSOR_DOMAIN) discovery_info = { CONF_HOST: lte_conf[CONF_HOST], BINARY_SENSOR_DOMAIN: binary_sensor_conf, } hass.async_create_task( discovery.async_load_platform( hass, BINARY_SENSOR_DOMAIN, DOMAIN, discovery_info, config ) ) return True async def _setup_lte(hass, lte_config): """Set up a Netgear LTE modem.""" host = lte_config[CONF_HOST] password = lte_config[CONF_PASSWORD] websession = hass.data[DATA_KEY].websession modem = eternalegypt.Modem(hostname=host, websession=websession) modem_data = ModemData(hass, host, modem) try: await _login(hass, modem_data, password) except eternalegypt.Error: retry_task = hass.loop.create_task(_retry_login(hass, modem_data, password)) @callback def cleanup_retry(event): """Clean up retry task resources.""" if not retry_task.done(): retry_task.cancel() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_retry) async def _login(hass, modem_data, password): """Log in and complete 
setup.""" await modem_data.modem.login(password=password) def fire_sms_event(sms): """Send an SMS event.""" data = { ATTR_HOST: modem_data.host, ATTR_SMS_ID: sms.id, ATTR_FROM: sms.sender, ATTR_MESSAGE: sms.message, } hass.bus.async_fire(EVENT_SMS, data) await modem_data.modem.add_sms_listener(fire_sms_event) await modem_data.async_update() hass.data[DATA_KEY].modem_data[modem_data.host] = modem_data async def _update(now): """Periodic update.""" await modem_data.async_update() update_unsub = async_track_time_interval(hass, _update, SCAN_INTERVAL) async def cleanup(event): """Clean up resources.""" update_unsub() await modem_data.modem.logout() del hass.data[DATA_KEY].modem_data[modem_data.host] hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup) async def _retry_login(hass, modem_data, password): """Sleep and retry setup.""" _LOGGER.warning("Could not connect to %s. Will keep trying", modem_data.host) modem_data.connected = False delay = 15 while not modem_data.connected: await asyncio.sleep(delay) try: await _login(hass, modem_data, password) except eternalegypt.Error: delay = min(2 * delay, 300) @attr.s class LTEEntity(Entity): """Base LTE entity.""" modem_data = attr.ib() sensor_type = attr.ib() _unique_id = attr.ib(init=False) @_unique_id.default def _init_unique_id(self): """Register unique_id while we know data is valid.""" return f"{self.sensor_type}_{self.modem_data.data.serial_number}" async def async_added_to_hass(self): """Register callback.""" self.async_on_remove( async_dispatcher_connect( self.hass, DISPATCHER_NETGEAR_LTE, self.async_write_ha_state ) ) async def async_update(self): """Force update of state.""" await self.modem_data.async_update() @property def should_poll(self): """Return that the sensor should not be polled.""" return False @property def available(self): """Return the availability of the sensor.""" return self.modem_data.data is not None @property def unique_id(self): """Return a unique ID like 'usage_5TG365AB0078V'.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return f"Netgear LTE {self.sensor_type}"
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/netgear_lte/__init__.py
"""Support for monitoring the Syncthing instance.""" import logging import aiosyncthing from homeassistant.components.sensor import SensorEntity from homeassistant.core import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_time_interval from .const import ( DOMAIN, FOLDER_PAUSED_RECEIVED, FOLDER_SENSOR_ALERT_ICON, FOLDER_SENSOR_DEFAULT_ICON, FOLDER_SENSOR_ICONS, FOLDER_SUMMARY_RECEIVED, SCAN_INTERVAL, SERVER_AVAILABLE, SERVER_UNAVAILABLE, STATE_CHANGED_RECEIVED, ) _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Syncthing sensors.""" syncthing = hass.data[DOMAIN][config_entry.entry_id] try: config = await syncthing.system.config() version = await syncthing.system.version() except aiosyncthing.exceptions.SyncthingError as exception: raise PlatformNotReady from exception server_id = syncthing.server_id entities = [ FolderSensor( syncthing, server_id, folder["id"], folder["label"], version["version"], ) for folder in config["folders"] ] async_add_entities(entities) class FolderSensor(SensorEntity): """A Syncthing folder sensor.""" STATE_ATTRIBUTES = { "errors": "errors", "globalBytes": "global_bytes", "globalDeleted": "global_deleted", "globalDirectories": "global_directories", "globalFiles": "global_files", "globalSymlinks": "global_symlinks", "globalTotalItems": "global_total_items", "ignorePatterns": "ignore_patterns", "inSyncBytes": "in_sync_bytes", "inSyncFiles": "in_sync_files", "invalid": "invalid", "localBytes": "local_bytes", "localDeleted": "local_deleted", "localDirectories": "local_directories", "localFiles": "local_files", "localSymlinks": "local_symlinks", "localTotalItems": "local_total_items", "needBytes": "need_bytes", "needDeletes": "need_deletes", "needDirectories": "need_directories", "needFiles": "need_files", "needSymlinks": "need_symlinks", "needTotalItems": "need_total_items", "pullErrors": "pull_errors", "state": "state", } def __init__(self, syncthing, server_id, folder_id, folder_label, version): """Initialize the sensor.""" self._syncthing = syncthing self._server_id = server_id self._folder_id = folder_id self._folder_label = folder_label self._state = None self._unsub_timer = None self._version = version self._short_server_id = server_id.split("-")[0] @property def name(self): """Return the name of the sensor.""" return f"{self._short_server_id} {self._folder_id} {self._folder_label}" @property def unique_id(self): """Return the unique id of the entity.""" return f"{self._short_server_id}-{self._folder_id}" @property def state(self): """Return the state of the sensor.""" return self._state["state"] @property def available(self): """Could the device be accessed during the last update call.""" return self._state is not None @property def icon(self): """Return the icon for this sensor.""" if self._state is None: return FOLDER_SENSOR_DEFAULT_ICON if self.state in FOLDER_SENSOR_ICONS: return FOLDER_SENSOR_ICONS[self.state] return FOLDER_SENSOR_ALERT_ICON @property def extra_state_attributes(self): """Return the state attributes.""" return self._state @property def should_poll(self): """Return the polling requirement for this sensor.""" return False @property def device_info(self): """Return device information.""" return { "identifiers": {(DOMAIN, self._server_id)}, "name": f"Syncthing ({self._syncthing.url})", "manufacturer": "Syncthing Team", "sw_version": 
self._version, "entry_type": "service", } async def async_update_status(self): """Request folder status and update state.""" try: state = await self._syncthing.database.status(self._folder_id) except aiosyncthing.exceptions.SyncthingError: self._state = None else: self._state = self._filter_state(state) self.async_write_ha_state() def subscribe(self): """Start polling syncthing folder status.""" if self._unsub_timer is None: async def refresh(event_time): """Get the latest data from Syncthing.""" await self.async_update_status() self._unsub_timer = async_track_time_interval( self.hass, refresh, SCAN_INTERVAL ) @callback def unsubscribe(self): """Stop polling syncthing folder status.""" if self._unsub_timer is not None: self._unsub_timer() self._unsub_timer = None async def async_added_to_hass(self): """Handle entity which will be added.""" @callback def handle_folder_summary(event): if self._state is not None: self._state = self._filter_state(event["data"]["summary"]) self.async_write_ha_state() self.async_on_remove( async_dispatcher_connect( self.hass, f"{FOLDER_SUMMARY_RECEIVED}-{self._server_id}-{self._folder_id}", handle_folder_summary, ) ) @callback def handle_state_changed(event): if self._state is not None: self._state["state"] = event["data"]["to"] self.async_write_ha_state() self.async_on_remove( async_dispatcher_connect( self.hass, f"{STATE_CHANGED_RECEIVED}-{self._server_id}-{self._folder_id}", handle_state_changed, ) ) @callback def handle_folder_paused(event): if self._state is not None: self._state["state"] = "paused" self.async_write_ha_state() self.async_on_remove( async_dispatcher_connect( self.hass, f"{FOLDER_PAUSED_RECEIVED}-{self._server_id}-{self._folder_id}", handle_folder_paused, ) ) @callback def handle_server_unavailable(): self._state = None self.unsubscribe() self.async_write_ha_state() self.async_on_remove( async_dispatcher_connect( self.hass, f"{SERVER_UNAVAILABLE}-{self._server_id}", handle_server_unavailable, ) ) async def handle_server_available(): self.subscribe() await self.async_update_status() self.async_on_remove( async_dispatcher_connect( self.hass, f"{SERVER_AVAILABLE}-{self._server_id}", handle_server_available, ) ) self.subscribe() self.async_on_remove(self.unsubscribe) await self.async_update_status() def _filter_state(self, state): # Select only needed state attributes and map their names state = { self.STATE_ATTRIBUTES[key]: value for key, value in state.items() if key in self.STATE_ATTRIBUTES } # A workaround, for some reason, state of paused folders is an empty string if state["state"] == "": state["state"] = "paused" # Add some useful attributes state["id"] = self._folder_id state["label"] = self._folder_label return state
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
home-assistant/home-assistant
tests/components/blebox/test_config_flow.py
homeassistant/components/syncthing/sensor.py