input
stringlengths
53
297k
output
stringclasses
604 values
repo_name
stringclasses
376 values
test_path
stringclasses
583 values
code_path
stringlengths
7
116
"""Implement the Google Smart Home traits.""" from __future__ import annotations import logging from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.humidifier import const as humidifier from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_CODE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, CAST_APP_ID_HOMEASSISTANT, SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_CUSTOM_BYPASS, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, STATE_IDLE, STATE_LOCKED, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN from homeassistant.helpers.network import get_url from homeassistant.util import color as color_util, dt, temperature as temp_util from .const import ( CHALLENGE_ACK_NEEDED, CHALLENGE_FAILED_PIN_NEEDED, CHALLENGE_PIN_NEEDED, ERR_ALREADY_ARMED, ERR_ALREADY_DISARMED, ERR_ALREADY_STOPPED, ERR_CHALLENGE_NOT_SETUP, ERR_NOT_SUPPORTED, ERR_UNSUPPORTED_INPUT, ERR_VALUE_OUT_OF_RANGE, ) from .error import ChallengeNeeded, SmartHomeError _LOGGER = logging.getLogger(__name__) PREFIX_TRAITS = "action.devices.traits." 
TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream" TRAIT_ONOFF = f"{PREFIX_TRAITS}OnOff" TRAIT_DOCK = f"{PREFIX_TRAITS}Dock" TRAIT_STARTSTOP = f"{PREFIX_TRAITS}StartStop" TRAIT_BRIGHTNESS = f"{PREFIX_TRAITS}Brightness" TRAIT_COLOR_SETTING = f"{PREFIX_TRAITS}ColorSetting" TRAIT_SCENE = f"{PREFIX_TRAITS}Scene" TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting" TRAIT_LOCKUNLOCK = f"{PREFIX_TRAITS}LockUnlock" TRAIT_FANSPEED = f"{PREFIX_TRAITS}FanSpeed" TRAIT_MODES = f"{PREFIX_TRAITS}Modes" TRAIT_INPUTSELECTOR = f"{PREFIX_TRAITS}InputSelector" TRAIT_OPENCLOSE = f"{PREFIX_TRAITS}OpenClose" TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume" TRAIT_ARMDISARM = f"{PREFIX_TRAITS}ArmDisarm" TRAIT_HUMIDITY_SETTING = f"{PREFIX_TRAITS}HumiditySetting" TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl" TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState" PREFIX_COMMANDS = "action.devices.commands." COMMAND_ONOFF = f"{PREFIX_COMMANDS}OnOff" COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream" COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock" COMMAND_STARTSTOP = f"{PREFIX_COMMANDS}StartStop" COMMAND_PAUSEUNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause" COMMAND_BRIGHTNESS_ABSOLUTE = f"{PREFIX_COMMANDS}BrightnessAbsolute" COMMAND_COLOR_ABSOLUTE = f"{PREFIX_COMMANDS}ColorAbsolute" COMMAND_ACTIVATE_SCENE = f"{PREFIX_COMMANDS}ActivateScene" COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = ( f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint" ) COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = ( f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange" ) COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode" COMMAND_LOCKUNLOCK = f"{PREFIX_COMMANDS}LockUnlock" COMMAND_FANSPEED = f"{PREFIX_COMMANDS}SetFanSpeed" COMMAND_MODES = f"{PREFIX_COMMANDS}SetModes" COMMAND_INPUT = f"{PREFIX_COMMANDS}SetInput" COMMAND_NEXT_INPUT = f"{PREFIX_COMMANDS}NextInput" COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput" COMMAND_OPENCLOSE = f"{PREFIX_COMMANDS}OpenClose" COMMAND_OPENCLOSE_RELATIVE = 
f"{PREFIX_COMMANDS}OpenCloseRelative" COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume" COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative" COMMAND_MUTE = f"{PREFIX_COMMANDS}mute" COMMAND_ARMDISARM = f"{PREFIX_COMMANDS}ArmDisarm" COMMAND_MEDIA_NEXT = f"{PREFIX_COMMANDS}mediaNext" COMMAND_MEDIA_PAUSE = f"{PREFIX_COMMANDS}mediaPause" COMMAND_MEDIA_PREVIOUS = f"{PREFIX_COMMANDS}mediaPrevious" COMMAND_MEDIA_RESUME = f"{PREFIX_COMMANDS}mediaResume" COMMAND_MEDIA_SEEK_RELATIVE = f"{PREFIX_COMMANDS}mediaSeekRelative" COMMAND_MEDIA_SEEK_TO_POSITION = f"{PREFIX_COMMANDS}mediaSeekToPosition" COMMAND_MEDIA_SHUFFLE = f"{PREFIX_COMMANDS}mediaShuffle" COMMAND_MEDIA_STOP = f"{PREFIX_COMMANDS}mediaStop" COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity" TRAITS = [] def register_trait(trait): """Decorate a function to register a trait.""" TRAITS.append(trait) return trait def _google_temp_unit(units): """Return Google temperature unit.""" if units == TEMP_FAHRENHEIT: return "F" return "C" def _next_selected(items: list[str], selected: str | None) -> str | None: """Return the next item in a item list starting at given value. 
If selected is missing in items, None is returned """ try: index = items.index(selected) except ValueError: return None next_item = 0 if index == len(items) - 1 else index + 1 return items[next_item] class _Trait: """Represents a Trait inside Google Assistant skill.""" commands = [] @staticmethod def might_2fa(domain, features, device_class): """Return if the trait might ask for 2FA.""" return False def __init__(self, hass, state, config): """Initialize a trait for a state.""" self.hass = hass self.state = state self.config = config def sync_attributes(self): """Return attributes for a sync request.""" raise NotImplementedError def query_attributes(self): """Return the attributes of this trait for this entity.""" raise NotImplementedError def can_execute(self, command, params): """Test if command can be executed.""" return command in self.commands async def execute(self, command, data, params, challenge): """Execute a trait command.""" raise NotImplementedError @register_trait class BrightnessTrait(_Trait): """Trait to control brightness of a device. 
https://developers.google.com/actions/smarthome/traits/brightness """ name = TRAIT_BRIGHTNESS commands = [COMMAND_BRIGHTNESS_ABSOLUTE] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == light.DOMAIN: return features & light.SUPPORT_BRIGHTNESS return False def sync_attributes(self): """Return brightness attributes for a sync request.""" return {} def query_attributes(self): """Return brightness query attributes.""" domain = self.state.domain response = {} if domain == light.DOMAIN: brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS) if brightness is not None: response["brightness"] = int(100 * (brightness / 255)) else: response["brightness"] = 0 return response async def execute(self, command, data, params, challenge): """Execute a brightness command.""" domain = self.state.domain if domain == light.DOMAIN: await self.hass.services.async_call( light.DOMAIN, light.SERVICE_TURN_ON, { ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_BRIGHTNESS_PCT: params["brightness"], }, blocking=True, context=data.context, ) @register_trait class CameraStreamTrait(_Trait): """Trait to stream from cameras. 
https://developers.google.com/actions/smarthome/traits/camerastream """ name = TRAIT_CAMERA_STREAM commands = [COMMAND_GET_CAMERA_STREAM] stream_info = None @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == camera.DOMAIN: return features & camera.SUPPORT_STREAM return False def sync_attributes(self): """Return stream attributes for a sync request.""" return { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } def query_attributes(self): """Return camera stream attributes.""" return self.stream_info or {} async def execute(self, command, data, params, challenge): """Execute a get camera stream command.""" url = await self.hass.components.camera.async_request_stream( self.state.entity_id, "hls" ) self.stream_info = { "cameraStreamAccessUrl": f"{get_url(self.hass)}{url}", "cameraStreamReceiverAppId": CAST_APP_ID_HOMEASSISTANT, } @register_trait class OnOffTrait(_Trait): """Trait to offer basic on and off functionality. 
https://developers.google.com/actions/smarthome/traits/onoff """ name = TRAIT_ONOFF commands = [COMMAND_ONOFF] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain in ( group.DOMAIN, input_boolean.DOMAIN, switch.DOMAIN, fan.DOMAIN, light.DOMAIN, media_player.DOMAIN, humidifier.DOMAIN, ) def sync_attributes(self): """Return OnOff attributes for a sync request.""" if self.state.attributes.get(ATTR_ASSUMED_STATE, False): return {"commandOnlyOnOff": True} return {} def query_attributes(self): """Return OnOff query attributes.""" return {"on": self.state.state not in (STATE_OFF, STATE_UNKNOWN)} async def execute(self, command, data, params, challenge): """Execute an OnOff command.""" domain = self.state.domain if domain == group.DOMAIN: service_domain = HA_DOMAIN service = SERVICE_TURN_ON if params["on"] else SERVICE_TURN_OFF else: service_domain = domain service = SERVICE_TURN_ON if params["on"] else SERVICE_TURN_OFF await self.hass.services.async_call( service_domain, service, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) @register_trait class ColorSettingTrait(_Trait): """Trait to offer color temperature functionality. 
https://developers.google.com/actions/smarthome/traits/colortemperature """ name = TRAIT_COLOR_SETTING commands = [COMMAND_COLOR_ABSOLUTE] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain != light.DOMAIN: return False return features & light.SUPPORT_COLOR_TEMP or features & light.SUPPORT_COLOR def sync_attributes(self): """Return color temperature attributes for a sync request.""" attrs = self.state.attributes features = attrs.get(ATTR_SUPPORTED_FEATURES, 0) response = {} if features & light.SUPPORT_COLOR: response["colorModel"] = "hsv" if features & light.SUPPORT_COLOR_TEMP: # Max Kelvin is Min Mireds K = 1000000 / mireds # Min Kelvin is Max Mireds K = 1000000 / mireds response["colorTemperatureRange"] = { "temperatureMaxK": color_util.color_temperature_mired_to_kelvin( attrs.get(light.ATTR_MIN_MIREDS) ), "temperatureMinK": color_util.color_temperature_mired_to_kelvin( attrs.get(light.ATTR_MAX_MIREDS) ), } return response def query_attributes(self): """Return color temperature query attributes.""" features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) color = {} if features & light.SUPPORT_COLOR: color_hs = self.state.attributes.get(light.ATTR_HS_COLOR) brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS, 1) if color_hs is not None: color["spectrumHsv"] = { "hue": color_hs[0], "saturation": color_hs[1] / 100, "value": brightness / 255, } if features & light.SUPPORT_COLOR_TEMP: temp = self.state.attributes.get(light.ATTR_COLOR_TEMP) # Some faulty integrations might put 0 in here, raising exception. 
if temp == 0: _LOGGER.warning( "Entity %s has incorrect color temperature %s", self.state.entity_id, temp, ) elif temp is not None: color["temperatureK"] = color_util.color_temperature_mired_to_kelvin( temp ) response = {} if color: response["color"] = color return response async def execute(self, command, data, params, challenge): """Execute a color temperature command.""" if "temperature" in params["color"]: temp = color_util.color_temperature_kelvin_to_mired( params["color"]["temperature"] ) min_temp = self.state.attributes[light.ATTR_MIN_MIREDS] max_temp = self.state.attributes[light.ATTR_MAX_MIREDS] if temp < min_temp or temp > max_temp: raise SmartHomeError( ERR_VALUE_OUT_OF_RANGE, f"Temperature should be between {min_temp} and {max_temp}", ) await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_COLOR_TEMP: temp}, blocking=True, context=data.context, ) elif "spectrumRGB" in params["color"]: # Convert integer to hex format and left pad with 0's till length 6 hex_value = f"{params['color']['spectrumRGB']:06x}" color = color_util.color_RGB_to_hs( *color_util.rgb_hex_to_rgb_list(hex_value) ) await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_HS_COLOR: color}, blocking=True, context=data.context, ) elif "spectrumHSV" in params["color"]: color = params["color"]["spectrumHSV"] saturation = color["saturation"] * 100 brightness = color["value"] * 255 await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, { ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_HS_COLOR: [color["hue"], saturation], light.ATTR_BRIGHTNESS: brightness, }, blocking=True, context=data.context, ) @register_trait class SceneTrait(_Trait): """Trait to offer scene functionality. 
https://developers.google.com/actions/smarthome/traits/scene """ name = TRAIT_SCENE commands = [COMMAND_ACTIVATE_SCENE] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain in (scene.DOMAIN, script.DOMAIN) def sync_attributes(self): """Return scene attributes for a sync request.""" # Neither supported domain can support sceneReversible return {} def query_attributes(self): """Return scene query attributes.""" return {} async def execute(self, command, data, params, challenge): """Execute a scene command.""" # Don't block for scripts as they can be slow. await self.hass.services.async_call( self.state.domain, SERVICE_TURN_ON, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=self.state.domain != script.DOMAIN, context=data.context, ) @register_trait class DockTrait(_Trait): """Trait to offer dock functionality. https://developers.google.com/actions/smarthome/traits/dock """ name = TRAIT_DOCK commands = [COMMAND_DOCK] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain == vacuum.DOMAIN def sync_attributes(self): """Return dock attributes for a sync request.""" return {} def query_attributes(self): """Return dock query attributes.""" return {"isDocked": self.state.state == vacuum.STATE_DOCKED} async def execute(self, command, data, params, challenge): """Execute a dock command.""" await self.hass.services.async_call( self.state.domain, vacuum.SERVICE_RETURN_TO_BASE, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) @register_trait class StartStopTrait(_Trait): """Trait to offer StartStop functionality. 
https://developers.google.com/actions/smarthome/traits/startstop """ name = TRAIT_STARTSTOP commands = [COMMAND_STARTSTOP, COMMAND_PAUSEUNPAUSE] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == vacuum.DOMAIN: return True if domain == cover.DOMAIN and features & cover.SUPPORT_STOP: return True return False def sync_attributes(self): """Return StartStop attributes for a sync request.""" domain = self.state.domain if domain == vacuum.DOMAIN: return { "pausable": self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) & vacuum.SUPPORT_PAUSE != 0 } if domain == cover.DOMAIN: return {} def query_attributes(self): """Return StartStop query attributes.""" domain = self.state.domain state = self.state.state if domain == vacuum.DOMAIN: return { "isRunning": state == vacuum.STATE_CLEANING, "isPaused": state == vacuum.STATE_PAUSED, } if domain == cover.DOMAIN: return {"isRunning": state in (cover.STATE_CLOSING, cover.STATE_OPENING)} async def execute(self, command, data, params, challenge): """Execute a StartStop command.""" domain = self.state.domain if domain == vacuum.DOMAIN: return await self._execute_vacuum(command, data, params, challenge) if domain == cover.DOMAIN: return await self._execute_cover(command, data, params, challenge) async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" if command == COMMAND_STARTSTOP: if params["start"]: await self.hass.services.async_call( self.state.domain, vacuum.SERVICE_START, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) else: await self.hass.services.async_call( self.state.domain, vacuum.SERVICE_STOP, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) elif command == COMMAND_PAUSEUNPAUSE: if params["pause"]: await self.hass.services.async_call( self.state.domain, vacuum.SERVICE_PAUSE, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) else: 
await self.hass.services.async_call( self.state.domain, vacuum.SERVICE_START, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) async def _execute_cover(self, command, data, params, challenge): """Execute a StartStop command.""" if command == COMMAND_STARTSTOP: if params["start"] is False: if ( self.state.state in ( cover.STATE_CLOSING, cover.STATE_OPENING, ) or self.state.attributes.get(ATTR_ASSUMED_STATE) ): await self.hass.services.async_call( self.state.domain, cover.SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) else: raise SmartHomeError( ERR_ALREADY_STOPPED, "Cover is already stopped" ) else: raise SmartHomeError( ERR_NOT_SUPPORTED, "Starting a cover is not supported" ) else: raise SmartHomeError( ERR_NOT_SUPPORTED, f"Command {command} is not supported" ) @register_trait class TemperatureSettingTrait(_Trait): """Trait to offer handling both temperature point and modes functionality. https://developers.google.com/actions/smarthome/traits/temperaturesetting """ name = TRAIT_TEMPERATURE_SETTING commands = [ COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, COMMAND_THERMOSTAT_SET_MODE, ] # We do not support "on" as we are unable to know how to restore # the last mode. 
hvac_to_google = { climate.HVAC_MODE_HEAT: "heat", climate.HVAC_MODE_COOL: "cool", climate.HVAC_MODE_OFF: "off", climate.HVAC_MODE_AUTO: "auto", climate.HVAC_MODE_HEAT_COOL: "heatcool", climate.HVAC_MODE_FAN_ONLY: "fan-only", climate.HVAC_MODE_DRY: "dry", } google_to_hvac = {value: key for key, value in hvac_to_google.items()} preset_to_google = {climate.PRESET_ECO: "eco"} google_to_preset = {value: key for key, value in preset_to_google.items()} @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == climate.DOMAIN: return True return ( domain == sensor.DOMAIN and device_class == sensor.DEVICE_CLASS_TEMPERATURE ) @property def climate_google_modes(self): """Return supported Google modes.""" modes = [] attrs = self.state.attributes for mode in attrs.get(climate.ATTR_HVAC_MODES, []): google_mode = self.hvac_to_google.get(mode) if google_mode and google_mode not in modes: modes.append(google_mode) for preset in attrs.get(climate.ATTR_PRESET_MODES, []): google_mode = self.preset_to_google.get(preset) if google_mode and google_mode not in modes: modes.append(google_mode) return modes def sync_attributes(self): """Return temperature point and modes attributes for a sync request.""" response = {} attrs = self.state.attributes domain = self.state.domain response["thermostatTemperatureUnit"] = _google_temp_unit( self.hass.config.units.temperature_unit ) if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.DEVICE_CLASS_TEMPERATURE: response["queryOnlyTemperatureSetting"] = True elif domain == climate.DOMAIN: modes = self.climate_google_modes # Some integrations don't support modes (e.g. opentherm), but Google doesn't # support changing the temperature if we don't have any modes. If there's # only one Google doesn't support changing it, so the default mode here is # only cosmetic. 
if len(modes) == 0: modes.append("heat") if "off" in modes and any( mode in modes for mode in ("heatcool", "heat", "cool") ): modes.append("on") response["availableThermostatModes"] = modes return response def query_attributes(self): """Return temperature point and modes query attributes.""" response = {} attrs = self.state.attributes domain = self.state.domain unit = self.hass.config.units.temperature_unit if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.DEVICE_CLASS_TEMPERATURE: current_temp = self.state.state if current_temp not in (STATE_UNKNOWN, STATE_UNAVAILABLE): response["thermostatTemperatureAmbient"] = round( temp_util.convert(float(current_temp), unit, TEMP_CELSIUS), 1 ) elif domain == climate.DOMAIN: operation = self.state.state preset = attrs.get(climate.ATTR_PRESET_MODE) supported = attrs.get(ATTR_SUPPORTED_FEATURES, 0) if preset in self.preset_to_google: response["thermostatMode"] = self.preset_to_google[preset] else: response["thermostatMode"] = self.hvac_to_google.get(operation) current_temp = attrs.get(climate.ATTR_CURRENT_TEMPERATURE) if current_temp is not None: response["thermostatTemperatureAmbient"] = round( temp_util.convert(current_temp, unit, TEMP_CELSIUS), 1 ) current_humidity = attrs.get(climate.ATTR_CURRENT_HUMIDITY) if current_humidity is not None: response["thermostatHumidityAmbient"] = current_humidity if operation in (climate.HVAC_MODE_AUTO, climate.HVAC_MODE_HEAT_COOL): if supported & climate.SUPPORT_TARGET_TEMPERATURE_RANGE: response["thermostatTemperatureSetpointHigh"] = round( temp_util.convert( attrs[climate.ATTR_TARGET_TEMP_HIGH], unit, TEMP_CELSIUS ), 1, ) response["thermostatTemperatureSetpointLow"] = round( temp_util.convert( attrs[climate.ATTR_TARGET_TEMP_LOW], unit, TEMP_CELSIUS ), 1, ) else: target_temp = attrs.get(ATTR_TEMPERATURE) if target_temp is not None: target_temp = round( temp_util.convert(target_temp, unit, TEMP_CELSIUS), 1 ) 
response["thermostatTemperatureSetpointHigh"] = target_temp response["thermostatTemperatureSetpointLow"] = target_temp else: target_temp = attrs.get(ATTR_TEMPERATURE) if target_temp is not None: response["thermostatTemperatureSetpoint"] = round( temp_util.convert(target_temp, unit, TEMP_CELSIUS), 1 ) return response async def execute(self, command, data, params, challenge): """Execute a temperature point or mode command.""" domain = self.state.domain if domain == sensor.DOMAIN: raise SmartHomeError( ERR_NOT_SUPPORTED, "Execute is not supported by sensor" ) # All sent in temperatures are always in Celsius unit = self.hass.config.units.temperature_unit min_temp = self.state.attributes[climate.ATTR_MIN_TEMP] max_temp = self.state.attributes[climate.ATTR_MAX_TEMP] if command == COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT: temp = temp_util.convert( params["thermostatTemperatureSetpoint"], TEMP_CELSIUS, unit ) if unit == TEMP_FAHRENHEIT: temp = round(temp) if temp < min_temp or temp > max_temp: raise SmartHomeError( ERR_VALUE_OUT_OF_RANGE, f"Temperature should be between {min_temp} and {max_temp}", ) await self.hass.services.async_call( climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: self.state.entity_id, ATTR_TEMPERATURE: temp}, blocking=True, context=data.context, ) elif command == COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE: temp_high = temp_util.convert( params["thermostatTemperatureSetpointHigh"], TEMP_CELSIUS, unit ) if unit == TEMP_FAHRENHEIT: temp_high = round(temp_high) if temp_high < min_temp or temp_high > max_temp: raise SmartHomeError( ERR_VALUE_OUT_OF_RANGE, ( f"Upper bound for temperature range should be between " f"{min_temp} and {max_temp}" ), ) temp_low = temp_util.convert( params["thermostatTemperatureSetpointLow"], TEMP_CELSIUS, unit ) if unit == TEMP_FAHRENHEIT: temp_low = round(temp_low) if temp_low < min_temp or temp_low > max_temp: raise SmartHomeError( ERR_VALUE_OUT_OF_RANGE, ( f"Lower bound for temperature range should be between " 
f"{min_temp} and {max_temp}" ), ) supported = self.state.attributes.get(ATTR_SUPPORTED_FEATURES) svc_data = {ATTR_ENTITY_ID: self.state.entity_id} if supported & climate.SUPPORT_TARGET_TEMPERATURE_RANGE: svc_data[climate.ATTR_TARGET_TEMP_HIGH] = temp_high svc_data[climate.ATTR_TARGET_TEMP_LOW] = temp_low else: svc_data[ATTR_TEMPERATURE] = (temp_high + temp_low) / 2 await self.hass.services.async_call( climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, svc_data, blocking=True, context=data.context, ) elif command == COMMAND_THERMOSTAT_SET_MODE: target_mode = params["thermostatMode"] supported = self.state.attributes.get(ATTR_SUPPORTED_FEATURES) if target_mode == "on": await self.hass.services.async_call( climate.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) return if target_mode == "off": await self.hass.services.async_call( climate.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) return if target_mode in self.google_to_preset: await self.hass.services.async_call( climate.DOMAIN, climate.SERVICE_SET_PRESET_MODE, { climate.ATTR_PRESET_MODE: self.google_to_preset[target_mode], ATTR_ENTITY_ID: self.state.entity_id, }, blocking=True, context=data.context, ) return await self.hass.services.async_call( climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, { ATTR_ENTITY_ID: self.state.entity_id, climate.ATTR_HVAC_MODE: self.google_to_hvac[target_mode], }, blocking=True, context=data.context, ) @register_trait class HumiditySettingTrait(_Trait): """Trait to offer humidity setting functionality. 
https://developers.google.com/actions/smarthome/traits/humiditysetting """ name = TRAIT_HUMIDITY_SETTING commands = [COMMAND_SET_HUMIDITY] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == humidifier.DOMAIN: return True return domain == sensor.DOMAIN and device_class == sensor.DEVICE_CLASS_HUMIDITY def sync_attributes(self): """Return humidity attributes for a sync request.""" response = {} attrs = self.state.attributes domain = self.state.domain if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.DEVICE_CLASS_HUMIDITY: response["queryOnlyHumiditySetting"] = True elif domain == humidifier.DOMAIN: response["humiditySetpointRange"] = { "minPercent": round( float(self.state.attributes[humidifier.ATTR_MIN_HUMIDITY]) ), "maxPercent": round( float(self.state.attributes[humidifier.ATTR_MAX_HUMIDITY]) ), } return response def query_attributes(self): """Return humidity query attributes.""" response = {} attrs = self.state.attributes domain = self.state.domain if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.DEVICE_CLASS_HUMIDITY: current_humidity = self.state.state if current_humidity not in (STATE_UNKNOWN, STATE_UNAVAILABLE): response["humidityAmbientPercent"] = round(float(current_humidity)) elif domain == humidifier.DOMAIN: target_humidity = attrs.get(humidifier.ATTR_HUMIDITY) if target_humidity is not None: response["humiditySetpointPercent"] = round(float(target_humidity)) return response async def execute(self, command, data, params, challenge): """Execute a humidity command.""" domain = self.state.domain if domain == sensor.DOMAIN: raise SmartHomeError( ERR_NOT_SUPPORTED, "Execute is not supported by sensor" ) if command == COMMAND_SET_HUMIDITY: await self.hass.services.async_call( humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY, { ATTR_ENTITY_ID: self.state.entity_id, humidifier.ATTR_HUMIDITY: params["humidity"], 
}, blocking=True, context=data.context, ) @register_trait class LockUnlockTrait(_Trait): """Trait to lock or unlock a lock. https://developers.google.com/actions/smarthome/traits/lockunlock """ name = TRAIT_LOCKUNLOCK commands = [COMMAND_LOCKUNLOCK] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain == lock.DOMAIN @staticmethod def might_2fa(domain, features, device_class): """Return if the trait might ask for 2FA.""" return True def sync_attributes(self): """Return LockUnlock attributes for a sync request.""" return {} def query_attributes(self): """Return LockUnlock query attributes.""" return {"isLocked": self.state.state == STATE_LOCKED} async def execute(self, command, data, params, challenge): """Execute an LockUnlock command.""" if params["lock"]: service = lock.SERVICE_LOCK else: _verify_pin_challenge(data, self.state, challenge) service = lock.SERVICE_UNLOCK await self.hass.services.async_call( lock.DOMAIN, service, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) @register_trait class ArmDisArmTrait(_Trait): """Trait to Arm or Disarm a Security System. 
https://developers.google.com/actions/smarthome/traits/armdisarm """ name = TRAIT_ARMDISARM commands = [COMMAND_ARMDISARM] state_to_service = { STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_TRIGGERED: SERVICE_ALARM_TRIGGER, } state_to_support = { STATE_ALARM_ARMED_HOME: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME, STATE_ALARM_ARMED_AWAY: alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, STATE_ALARM_ARMED_NIGHT: alarm_control_panel.const.SUPPORT_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_CUSTOM_BYPASS: alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_TRIGGERED: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER, } @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain == alarm_control_panel.DOMAIN @staticmethod def might_2fa(domain, features, device_class): """Return if the trait might ask for 2FA.""" return True def _supported_states(self): """Return supported states.""" features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) return [ state for state, required_feature in self.state_to_support.items() if features & required_feature != 0 ] def sync_attributes(self): """Return ArmDisarm attributes for a sync request.""" response = {} levels = [] for state in self._supported_states(): # level synonyms are generated from state names # 'armed_away' becomes 'armed away' or 'away' level_synonym = [state.replace("_", " ")] if state != STATE_ALARM_TRIGGERED: level_synonym.append(state.split("_")[1]) level = { "level_name": state, "level_values": [{"level_synonym": level_synonym, "lang": "en"}], } levels.append(level) response["availableArmLevels"] = {"levels": levels, "ordered": False} return response def query_attributes(self): """Return ArmDisarm query attributes.""" if "next_state" in self.state.attributes: armed_state 
= self.state.attributes["next_state"] else: armed_state = self.state.state response = {"isArmed": armed_state in self.state_to_service} if response["isArmed"]: response.update({"currentArmLevel": armed_state}) return response async def execute(self, command, data, params, challenge): """Execute an ArmDisarm command.""" if params["arm"] and not params.get("cancel"): arm_level = params.get("armLevel") # If no arm level given, we can only arm it if there is # only one supported arm type. We never default to triggered. if not arm_level: states = self._supported_states() if STATE_ALARM_TRIGGERED in states: states.remove(STATE_ALARM_TRIGGERED) if len(states) != 1: raise SmartHomeError(ERR_NOT_SUPPORTED, "ArmLevel missing") arm_level = states[0] if self.state.state == arm_level: raise SmartHomeError(ERR_ALREADY_ARMED, "System is already armed") if self.state.attributes["code_arm_required"]: _verify_pin_challenge(data, self.state, challenge) service = self.state_to_service[arm_level] # disarm the system without asking for code when # 'cancel' arming action is received while current status is pending elif ( params["arm"] and params.get("cancel") and self.state.state == STATE_ALARM_PENDING ): service = SERVICE_ALARM_DISARM else: if self.state.state == STATE_ALARM_DISARMED: raise SmartHomeError(ERR_ALREADY_DISARMED, "System is already disarmed") _verify_pin_challenge(data, self.state, challenge) service = SERVICE_ALARM_DISARM await self.hass.services.async_call( alarm_control_panel.DOMAIN, service, { ATTR_ENTITY_ID: self.state.entity_id, ATTR_CODE: data.config.secure_devices_pin, }, blocking=True, context=data.context, ) @register_trait class FanSpeedTrait(_Trait): """Trait to control speed of Fan. 
https://developers.google.com/actions/smarthome/traits/fanspeed """ name = TRAIT_FANSPEED commands = [COMMAND_FANSPEED] speed_synonyms = { fan.SPEED_OFF: ["stop", "off"], fan.SPEED_LOW: ["slow", "low", "slowest", "lowest"], fan.SPEED_MEDIUM: ["medium", "mid", "middle"], fan.SPEED_HIGH: ["high", "max", "fast", "highest", "fastest", "maximum"], } @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == fan.DOMAIN: return features & fan.SUPPORT_SET_SPEED if domain == climate.DOMAIN: return features & climate.SUPPORT_FAN_MODE return False def sync_attributes(self): """Return speed point and modes attributes for a sync request.""" domain = self.state.domain speeds = [] reversible = False if domain == fan.DOMAIN: modes = self.state.attributes.get(fan.ATTR_SPEED_LIST, []) for mode in modes: if mode not in self.speed_synonyms: continue speed = { "speed_name": mode, "speed_values": [ {"speed_synonym": self.speed_synonyms.get(mode), "lang": "en"} ], } speeds.append(speed) reversible = bool( self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) & fan.SUPPORT_DIRECTION ) elif domain == climate.DOMAIN: modes = self.state.attributes.get(climate.ATTR_FAN_MODES, []) for mode in modes: speed = { "speed_name": mode, "speed_values": [{"speed_synonym": [mode], "lang": "en"}], } speeds.append(speed) return { "availableFanSpeeds": {"speeds": speeds, "ordered": True}, "reversible": reversible, "supportsFanSpeedPercent": True, } def query_attributes(self): """Return speed point and modes query attributes.""" attrs = self.state.attributes domain = self.state.domain response = {} if domain == climate.DOMAIN: speed = attrs.get(climate.ATTR_FAN_MODE) if speed is not None: response["currentFanSpeedSetting"] = speed if domain == fan.DOMAIN: speed = attrs.get(fan.ATTR_SPEED) percent = attrs.get(fan.ATTR_PERCENTAGE) or 0 if speed is not None: response["on"] = speed != fan.SPEED_OFF response["currentFanSpeedSetting"] = speed 
response["currentFanSpeedPercent"] = percent return response async def execute(self, command, data, params, challenge): """Execute an SetFanSpeed command.""" domain = self.state.domain if domain == climate.DOMAIN: await self.hass.services.async_call( climate.DOMAIN, climate.SERVICE_SET_FAN_MODE, { ATTR_ENTITY_ID: self.state.entity_id, climate.ATTR_FAN_MODE: params["fanSpeed"], }, blocking=True, context=data.context, ) if domain == fan.DOMAIN: service_params = { ATTR_ENTITY_ID: self.state.entity_id, } if "fanSpeedPercent" in params: service = fan.SERVICE_SET_PERCENTAGE service_params[fan.ATTR_PERCENTAGE] = params["fanSpeedPercent"] else: service = fan.SERVICE_SET_SPEED service_params[fan.ATTR_SPEED] = params["fanSpeed"] await self.hass.services.async_call( fan.DOMAIN, service, service_params, blocking=True, context=data.context, ) @register_trait class ModesTrait(_Trait): """Trait to set modes. https://developers.google.com/actions/smarthome/traits/modes """ name = TRAIT_MODES commands = [COMMAND_MODES] SYNONYMS = { "sound mode": ["sound mode", "effects"], "option": ["option", "setting", "mode", "value"], } @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == input_select.DOMAIN: return True if domain == humidifier.DOMAIN and features & humidifier.SUPPORT_MODES: return True if domain == light.DOMAIN and features & light.SUPPORT_EFFECT: return True if domain != media_player.DOMAIN: return False return features & media_player.SUPPORT_SELECT_SOUND_MODE def _generate(self, name, settings): """Generate a list of modes.""" mode = { "name": name, "name_values": [ {"name_synonym": self.SYNONYMS.get(name, [name]), "lang": "en"} ], "settings": [], "ordered": False, } for setting in settings: mode["settings"].append( { "setting_name": setting, "setting_values": [ { "setting_synonym": self.SYNONYMS.get(setting, [setting]), "lang": "en", } ], } ) return mode def sync_attributes(self): """Return mode attributes for a sync 
request.""" modes = [] for domain, attr, name in ( (media_player.DOMAIN, media_player.ATTR_SOUND_MODE_LIST, "sound mode"), (input_select.DOMAIN, input_select.ATTR_OPTIONS, "option"), (humidifier.DOMAIN, humidifier.ATTR_AVAILABLE_MODES, "mode"), (light.DOMAIN, light.ATTR_EFFECT_LIST, "effect"), ): if self.state.domain != domain: continue items = self.state.attributes.get(attr) if items is not None: modes.append(self._generate(name, items)) # Shortcut since all domains are currently unique break payload = {"availableModes": modes} return payload def query_attributes(self): """Return current modes.""" attrs = self.state.attributes response = {} mode_settings = {} if self.state.domain == media_player.DOMAIN: if media_player.ATTR_SOUND_MODE_LIST in attrs: mode_settings["sound mode"] = attrs.get(media_player.ATTR_SOUND_MODE) elif self.state.domain == input_select.DOMAIN: mode_settings["option"] = self.state.state elif self.state.domain == humidifier.DOMAIN: if ATTR_MODE in attrs: mode_settings["mode"] = attrs.get(ATTR_MODE) elif self.state.domain == light.DOMAIN and light.ATTR_EFFECT in attrs: mode_settings["effect"] = attrs.get(light.ATTR_EFFECT) if mode_settings: response["on"] = self.state.state not in (STATE_OFF, STATE_UNKNOWN) response["currentModeSettings"] = mode_settings return response async def execute(self, command, data, params, challenge): """Execute a SetModes command.""" settings = params.get("updateModeSettings") if self.state.domain == input_select.DOMAIN: option = params["updateModeSettings"]["option"] await self.hass.services.async_call( input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: self.state.entity_id, input_select.ATTR_OPTION: option, }, blocking=True, context=data.context, ) return if self.state.domain == humidifier.DOMAIN: requested_mode = settings["mode"] await self.hass.services.async_call( humidifier.DOMAIN, humidifier.SERVICE_SET_MODE, { ATTR_MODE: requested_mode, ATTR_ENTITY_ID: self.state.entity_id, }, 
blocking=True, context=data.context, ) return if self.state.domain == light.DOMAIN: requested_effect = settings["effect"] await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, { ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_EFFECT: requested_effect, }, blocking=True, context=data.context, ) return if self.state.domain != media_player.DOMAIN: _LOGGER.info( "Received an Options command for unrecognised domain %s", self.state.domain, ) return sound_mode = settings.get("sound mode") if sound_mode: await self.hass.services.async_call( media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE, { ATTR_ENTITY_ID: self.state.entity_id, media_player.ATTR_SOUND_MODE: sound_mode, }, blocking=True, context=data.context, ) @register_trait class InputSelectorTrait(_Trait): """Trait to set modes. https://developers.google.com/assistant/smarthome/traits/inputselector """ name = TRAIT_INPUTSELECTOR commands = [COMMAND_INPUT, COMMAND_NEXT_INPUT, COMMAND_PREVIOUS_INPUT] SYNONYMS = {} @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == media_player.DOMAIN and ( features & media_player.SUPPORT_SELECT_SOURCE ): return True return False def sync_attributes(self): """Return mode attributes for a sync request.""" attrs = self.state.attributes inputs = [ {"key": source, "names": [{"name_synonym": [source], "lang": "en"}]} for source in attrs.get(media_player.ATTR_INPUT_SOURCE_LIST, []) ] payload = {"availableInputs": inputs, "orderedInputs": True} return payload def query_attributes(self): """Return current modes.""" attrs = self.state.attributes return {"currentInput": attrs.get(media_player.ATTR_INPUT_SOURCE, "")} async def execute(self, command, data, params, challenge): """Execute an SetInputSource command.""" sources = self.state.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or [] source = self.state.attributes.get(media_player.ATTR_INPUT_SOURCE) if command == COMMAND_INPUT: requested_source = 
params.get("newInput") elif command == COMMAND_NEXT_INPUT: requested_source = _next_selected(sources, source) elif command == COMMAND_PREVIOUS_INPUT: requested_source = _next_selected(list(reversed(sources)), source) else: raise SmartHomeError(ERR_NOT_SUPPORTED, "Unsupported command") if requested_source not in sources: raise SmartHomeError(ERR_UNSUPPORTED_INPUT, "Unsupported input") await self.hass.services.async_call( media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE, { ATTR_ENTITY_ID: self.state.entity_id, media_player.ATTR_INPUT_SOURCE: requested_source, }, blocking=True, context=data.context, ) @register_trait class OpenCloseTrait(_Trait): """Trait to open and close a cover. https://developers.google.com/actions/smarthome/traits/openclose """ # Cover device classes that require 2FA COVER_2FA = ( cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE, ) name = TRAIT_OPENCLOSE commands = [COMMAND_OPENCLOSE, COMMAND_OPENCLOSE_RELATIVE] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == cover.DOMAIN: return True return domain == binary_sensor.DOMAIN and device_class in ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ) @staticmethod def might_2fa(domain, features, device_class): """Return if the trait might ask for 2FA.""" return domain == cover.DOMAIN and device_class in OpenCloseTrait.COVER_2FA def sync_attributes(self): """Return opening direction.""" response = {} features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) if self.state.domain == binary_sensor.DOMAIN: response["queryOnlyOpenClose"] = True response["discreteOnlyOpenClose"] = True elif ( self.state.domain == cover.DOMAIN and features & cover.SUPPORT_SET_POSITION == 0 ): response["discreteOnlyOpenClose"] = True if ( features & cover.SUPPORT_OPEN == 0 and features & 
cover.SUPPORT_CLOSE == 0 ): response["queryOnlyOpenClose"] = True if self.state.attributes.get(ATTR_ASSUMED_STATE): response["commandOnlyOpenClose"] = True return response def query_attributes(self): """Return state query attributes.""" domain = self.state.domain response = {} # When it's an assumed state, we will return empty state # This shouldn't happen because we set `commandOnlyOpenClose` # but Google still queries. Erroring here will cause device # to show up offline. if self.state.attributes.get(ATTR_ASSUMED_STATE): return response if domain == cover.DOMAIN: if self.state.state == STATE_UNKNOWN: raise SmartHomeError( ERR_NOT_SUPPORTED, "Querying state is not supported" ) position = self.state.attributes.get(cover.ATTR_CURRENT_POSITION) if position is not None: response["openPercent"] = position elif self.state.state != cover.STATE_CLOSED: response["openPercent"] = 100 else: response["openPercent"] = 0 elif domain == binary_sensor.DOMAIN: if self.state.state == STATE_ON: response["openPercent"] = 100 else: response["openPercent"] = 0 return response async def execute(self, command, data, params, challenge): """Execute an Open, close, Set position command.""" domain = self.state.domain features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) if domain == cover.DOMAIN: svc_params = {ATTR_ENTITY_ID: self.state.entity_id} should_verify = False if command == COMMAND_OPENCLOSE_RELATIVE: position = self.state.attributes.get(cover.ATTR_CURRENT_POSITION) if position is None: raise SmartHomeError( ERR_NOT_SUPPORTED, "Current position not know for relative command", ) position = max(0, min(100, position + params["openRelativePercent"])) else: position = params["openPercent"] if position == 0: service = cover.SERVICE_CLOSE_COVER should_verify = False elif position == 100: service = cover.SERVICE_OPEN_COVER should_verify = True elif features & cover.SUPPORT_SET_POSITION: service = cover.SERVICE_SET_COVER_POSITION if position > 0: should_verify = True 
svc_params[cover.ATTR_POSITION] = position else: raise SmartHomeError( ERR_NOT_SUPPORTED, "No support for partial open close" ) if ( should_verify and self.state.attributes.get(ATTR_DEVICE_CLASS) in OpenCloseTrait.COVER_2FA ): _verify_pin_challenge(data, self.state, challenge) await self.hass.services.async_call( cover.DOMAIN, service, svc_params, blocking=True, context=data.context ) @register_trait class VolumeTrait(_Trait): """Trait to control volume of a device. https://developers.google.com/actions/smarthome/traits/volume """ name = TRAIT_VOLUME commands = [COMMAND_SET_VOLUME, COMMAND_VOLUME_RELATIVE, COMMAND_MUTE] @staticmethod def supported(domain, features, device_class): """Test if trait is supported.""" if domain == media_player.DOMAIN: return features & ( media_player.SUPPORT_VOLUME_SET | media_player.SUPPORT_VOLUME_STEP ) return False def sync_attributes(self): """Return volume attributes for a sync request.""" features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) return { "volumeCanMuteAndUnmute": bool(features & media_player.SUPPORT_VOLUME_MUTE), "commandOnlyVolume": self.state.attributes.get(ATTR_ASSUMED_STATE, False), # Volume amounts in SET_VOLUME and VOLUME_RELATIVE are on a scale # from 0 to this value. "volumeMaxLevel": 100, # Default change for queries like "Hey Google, volume up". # 10% corresponds to the default behavior for the # media_player.volume{up,down} services. 
"levelStepSize": 10, } def query_attributes(self): """Return volume query attributes.""" response = {} level = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL) if level is not None: # Convert 0.0-1.0 to 0-100 response["currentVolume"] = int(level * 100) muted = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_MUTED) if muted is not None: response["isMuted"] = bool(muted) return response async def _set_volume_absolute(self, data, level): await self.hass.services.async_call( media_player.DOMAIN, media_player.SERVICE_VOLUME_SET, { ATTR_ENTITY_ID: self.state.entity_id, media_player.ATTR_MEDIA_VOLUME_LEVEL: level, }, blocking=True, context=data.context, ) async def _execute_set_volume(self, data, params): level = max(0, min(100, params["volumeLevel"])) if not ( self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) & media_player.SUPPORT_VOLUME_SET ): raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported") await self._set_volume_absolute(data, level / 100) async def _execute_volume_relative(self, data, params): relative = params["relativeSteps"] features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) if features & media_player.SUPPORT_VOLUME_SET: current = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL) target = max(0.0, min(1.0, current + relative / 100)) await self._set_volume_absolute(data, target) elif features & media_player.SUPPORT_VOLUME_STEP: svc = media_player.SERVICE_VOLUME_UP if relative < 0: svc = media_player.SERVICE_VOLUME_DOWN relative = -relative for _ in range(relative): await self.hass.services.async_call( media_player.DOMAIN, svc, {ATTR_ENTITY_ID: self.state.entity_id}, blocking=True, context=data.context, ) else: raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported") async def _execute_mute(self, data, params): mute = params["mute"] if not ( self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) & media_player.SUPPORT_VOLUME_MUTE ): raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not 
supported") await self.hass.services.async_call( media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE, { ATTR_ENTITY_ID: self.state.entity_id, media_player.ATTR_MEDIA_VOLUME_MUTED: mute, }, blocking=True, context=data.context, ) async def execute(self, command, data, params, challenge): """Execute a volume command.""" if command == COMMAND_SET_VOLUME: await self._execute_set_volume(data, params) elif command == COMMAND_VOLUME_RELATIVE: await self._execute_volume_relative(data, params) elif command == COMMAND_MUTE: await self._execute_mute(data, params) else: raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported") def _verify_pin_challenge(data, state, challenge): """Verify a pin challenge.""" if not data.config.should_2fa(state): return if not data.config.secure_devices_pin: raise SmartHomeError(ERR_CHALLENGE_NOT_SETUP, "Challenge is not set up") if not challenge: raise ChallengeNeeded(CHALLENGE_PIN_NEEDED) pin = challenge.get("pin") if pin != data.config.secure_devices_pin: raise ChallengeNeeded(CHALLENGE_FAILED_PIN_NEEDED) def _verify_ack_challenge(data, state, challenge): """Verify an ack challenge.""" if not data.config.should_2fa(state): return if not challenge or not challenge.get("ack"): raise ChallengeNeeded(CHALLENGE_ACK_NEEDED) MEDIA_COMMAND_SUPPORT_MAPPING = { COMMAND_MEDIA_NEXT: media_player.SUPPORT_NEXT_TRACK, COMMAND_MEDIA_PAUSE: media_player.SUPPORT_PAUSE, COMMAND_MEDIA_PREVIOUS: media_player.SUPPORT_PREVIOUS_TRACK, COMMAND_MEDIA_RESUME: media_player.SUPPORT_PLAY, COMMAND_MEDIA_SEEK_RELATIVE: media_player.SUPPORT_SEEK, COMMAND_MEDIA_SEEK_TO_POSITION: media_player.SUPPORT_SEEK, COMMAND_MEDIA_SHUFFLE: media_player.SUPPORT_SHUFFLE_SET, COMMAND_MEDIA_STOP: media_player.SUPPORT_STOP, } MEDIA_COMMAND_ATTRIBUTES = { COMMAND_MEDIA_NEXT: "NEXT", COMMAND_MEDIA_PAUSE: "PAUSE", COMMAND_MEDIA_PREVIOUS: "PREVIOUS", COMMAND_MEDIA_RESUME: "RESUME", COMMAND_MEDIA_SEEK_RELATIVE: "SEEK_RELATIVE", COMMAND_MEDIA_SEEK_TO_POSITION: "SEEK_TO_POSITION", 
COMMAND_MEDIA_SHUFFLE: "SHUFFLE", COMMAND_MEDIA_STOP: "STOP", } @register_trait class TransportControlTrait(_Trait): """Trait to control media playback. https://developers.google.com/actions/smarthome/traits/transportcontrol """ name = TRAIT_TRANSPORT_CONTROL commands = [ COMMAND_MEDIA_NEXT, COMMAND_MEDIA_PAUSE, COMMAND_MEDIA_PREVIOUS, COMMAND_MEDIA_RESUME, COMMAND_MEDIA_SEEK_RELATIVE, COMMAND_MEDIA_SEEK_TO_POSITION, COMMAND_MEDIA_SHUFFLE, COMMAND_MEDIA_STOP, ] @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" if domain == media_player.DOMAIN: for feature in MEDIA_COMMAND_SUPPORT_MAPPING.values(): if features & feature: return True return False def sync_attributes(self): """Return opening direction.""" response = {} if self.state.domain == media_player.DOMAIN: features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) support = [] for command, feature in MEDIA_COMMAND_SUPPORT_MAPPING.items(): if features & feature: support.append(MEDIA_COMMAND_ATTRIBUTES[command]) response["transportControlSupportedCommands"] = support return response def query_attributes(self): """Return the attributes of this trait for this entity.""" return {} async def execute(self, command, data, params, challenge): """Execute a media command.""" service_attrs = {ATTR_ENTITY_ID: self.state.entity_id} if command == COMMAND_MEDIA_SEEK_RELATIVE: service = media_player.SERVICE_MEDIA_SEEK rel_position = params["relativePositionMs"] / 1000 seconds_since = 0 # Default to 0 seconds if self.state.state == STATE_PLAYING: now = dt.utcnow() upd_at = self.state.attributes.get( media_player.ATTR_MEDIA_POSITION_UPDATED_AT, now ) seconds_since = (now - upd_at).total_seconds() position = self.state.attributes.get(media_player.ATTR_MEDIA_POSITION, 0) max_position = self.state.attributes.get( media_player.ATTR_MEDIA_DURATION, 0 ) service_attrs[media_player.ATTR_MEDIA_SEEK_POSITION] = min( max(position + seconds_since + rel_position, 0), max_position ) elif 
command == COMMAND_MEDIA_SEEK_TO_POSITION: service = media_player.SERVICE_MEDIA_SEEK max_position = self.state.attributes.get( media_player.ATTR_MEDIA_DURATION, 0 ) service_attrs[media_player.ATTR_MEDIA_SEEK_POSITION] = min( max(params["absPositionMs"] / 1000, 0), max_position ) elif command == COMMAND_MEDIA_NEXT: service = media_player.SERVICE_MEDIA_NEXT_TRACK elif command == COMMAND_MEDIA_PAUSE: service = media_player.SERVICE_MEDIA_PAUSE elif command == COMMAND_MEDIA_PREVIOUS: service = media_player.SERVICE_MEDIA_PREVIOUS_TRACK elif command == COMMAND_MEDIA_RESUME: service = media_player.SERVICE_MEDIA_PLAY elif command == COMMAND_MEDIA_SHUFFLE: service = media_player.SERVICE_SHUFFLE_SET # Google Assistant only supports enabling shuffle service_attrs[media_player.ATTR_MEDIA_SHUFFLE] = True elif command == COMMAND_MEDIA_STOP: service = media_player.SERVICE_MEDIA_STOP else: raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported") await self.hass.services.async_call( media_player.DOMAIN, service, service_attrs, blocking=True, context=data.context, ) @register_trait class MediaStateTrait(_Trait): """Trait to get media playback state. 
https://developers.google.com/actions/smarthome/traits/mediastate """ name = TRAIT_MEDIA_STATE commands = [] activity_lookup = { STATE_OFF: "INACTIVE", STATE_IDLE: "STANDBY", STATE_PLAYING: "ACTIVE", STATE_ON: "STANDBY", STATE_PAUSED: "STANDBY", STATE_STANDBY: "STANDBY", STATE_UNAVAILABLE: "INACTIVE", STATE_UNKNOWN: "INACTIVE", } playback_lookup = { STATE_OFF: "STOPPED", STATE_IDLE: "STOPPED", STATE_PLAYING: "PLAYING", STATE_ON: "STOPPED", STATE_PAUSED: "PAUSED", STATE_STANDBY: "STOPPED", STATE_UNAVAILABLE: "STOPPED", STATE_UNKNOWN: "STOPPED", } @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" return domain == media_player.DOMAIN def sync_attributes(self): """Return attributes for a sync request.""" return {"supportActivityState": True, "supportPlaybackState": True} def query_attributes(self): """Return the attributes of this trait for this entity.""" return { "activityState": self.activity_lookup.get(self.state.state, "INACTIVE"), "playbackState": self.playback_lookup.get(self.state.state, "STOPPED"), }
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/google_assistant/trait.py
"""Support for Z-Wave lights.""" from __future__ import annotations import logging from typing import Any, Callable from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import ColorComponent, CommandClass from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE_VALUE, DOMAIN as LIGHT_DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect import homeassistant.util.color as color_util from .const import DATA_CLIENT, DATA_UNSUBSCRIBE, DOMAIN from .discovery import ZwaveDiscoveryInfo from .entity import ZWaveBaseEntity LOGGER = logging.getLogger(__name__) MULTI_COLOR_MAP = { ColorComponent.WARM_WHITE: "warmWhite", ColorComponent.COLD_WHITE: "coldWhite", ColorComponent.RED: "red", ColorComponent.GREEN: "green", ColorComponent.BLUE: "blue", ColorComponent.AMBER: "amber", ColorComponent.CYAN: "cyan", ColorComponent.PURPLE: "purple", } async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable ) -> None: """Set up Z-Wave Light from Config Entry.""" client: ZwaveClient = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] @callback def async_add_light(info: ZwaveDiscoveryInfo) -> None: """Add Z-Wave Light.""" light = ZwaveLight(config_entry, client, info) async_add_entities([light]) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect( hass, f"{DOMAIN}_{config_entry.entry_id}_add_{LIGHT_DOMAIN}", async_add_light, ) ) def byte_to_zwave_brightness(value: int) -> int: """Convert brightness in 0-255 scale to 0-99 scale. `value` -- (int) Brightness byte value from 0-255. 
""" if value > 0: return max(1, round((value / 255) * 99)) return 0 class ZwaveLight(ZWaveBaseEntity, LightEntity): """Representation of a Z-Wave light.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo ) -> None: """Initialize the light.""" super().__init__(config_entry, client, info) self._supports_color = False self._supports_white_value = False self._supports_color_temp = False self._hs_color: tuple[float, float] | None = None self._white_value: int | None = None self._color_temp: int | None = None self._min_mireds = 153 # 6500K as a safe default self._max_mireds = 370 # 2700K as a safe default self._supported_features = SUPPORT_BRIGHTNESS # get additional (optional) values and set features self._target_value = self.get_zwave_value("targetValue") self._dimming_duration = self.get_zwave_value("duration") if self._dimming_duration is not None: self._supported_features |= SUPPORT_TRANSITION self._calculate_color_values() if self._supports_color: self._supported_features |= SUPPORT_COLOR if self._supports_color_temp: self._supported_features |= SUPPORT_COLOR_TEMP if self._supports_white_value: self._supported_features |= SUPPORT_WHITE_VALUE @callback def on_value_update(self) -> None: """Call when a watched value is added or updated.""" self._calculate_color_values() @property def brightness(self) -> int: """Return the brightness of this light between 0..255. Z-Wave multilevel switches use a range of [0, 99] to control brightness. 
""" if self.info.primary_value.value is not None: return round((self.info.primary_value.value / 99) * 255) return 0 @property def is_on(self) -> bool: """Return true if device is on (brightness above 0).""" return self.brightness > 0 @property def hs_color(self) -> tuple[float, float] | None: """Return the hs color.""" return self._hs_color @property def white_value(self) -> int | None: """Return the white value of this light between 0..255.""" return self._white_value @property def color_temp(self) -> int | None: """Return the color temperature.""" return self._color_temp @property def min_mireds(self) -> int: """Return the coldest color_temp that this light supports.""" return self._min_mireds @property def max_mireds(self) -> int: """Return the warmest color_temp that this light supports.""" return self._max_mireds @property def supported_features(self) -> int: """Flag supported features.""" return self._supported_features async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" # RGB/HS color hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None and self._supports_color: red, green, blue = color_util.color_hs_to_RGB(*hs_color) colors = { ColorComponent.RED: red, ColorComponent.GREEN: green, ColorComponent.BLUE: blue, } if self._supports_color_temp: # turn of white leds when setting rgb colors[ColorComponent.WARM_WHITE] = 0 colors[ColorComponent.COLD_WHITE] = 0 await self._async_set_colors(colors) # Color temperature color_temp = kwargs.get(ATTR_COLOR_TEMP) if color_temp is not None and self._supports_color_temp: # Limit color temp to min/max values cold = max( 0, min( 255, round( (self._max_mireds - color_temp) / (self._max_mireds - self._min_mireds) * 255 ), ), ) warm = 255 - cold await self._async_set_colors( { # turn off color leds when setting color temperature ColorComponent.RED: 0, ColorComponent.GREEN: 0, ColorComponent.BLUE: 0, ColorComponent.WARM_WHITE: warm, ColorComponent.COLD_WHITE: cold, } ) # White value white_value = 
kwargs.get(ATTR_WHITE_VALUE) if white_value is not None and self._supports_white_value: # white led brightness is controlled by white level # rgb leds (if any) can be on at the same time await self._async_set_colors( { ColorComponent.WARM_WHITE: white_value, ColorComponent.COLD_WHITE: white_value, } ) # set brightness await self._async_set_brightness( kwargs.get(ATTR_BRIGHTNESS), kwargs.get(ATTR_TRANSITION) ) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) async def _async_set_colors(self, colors: dict[ColorComponent, int]) -> None: """Set (multiple) defined colors to given value(s).""" # prefer the (new) combined color property # https://github.com/zwave-js/node-zwave-js/pull/1782 combined_color_val = self.get_zwave_value( "targetColor", CommandClass.SWITCH_COLOR, value_property_key=None, ) if combined_color_val and isinstance(combined_color_val.value, dict): colors_dict = {} for color, value in colors.items(): color_name = MULTI_COLOR_MAP[color] colors_dict[color_name] = value # set updated color object await self.info.node.async_set_value(combined_color_val, colors_dict) return # fallback to setting the color(s) one by one if multicolor fails # not sure this is needed at all, but just in case for color, value in colors.items(): await self._async_set_color(color, value) async def _async_set_color(self, color: ColorComponent, new_value: int) -> None: """Set defined color to given value.""" # actually set the new color value target_zwave_value = self.get_zwave_value( "targetColor", CommandClass.SWITCH_COLOR, value_property_key=color.value, ) if target_zwave_value is None: # guard for unsupported color return await self.info.node.async_set_value(target_zwave_value, new_value) async def _async_set_brightness( self, brightness: int | None, transition: int | None = None ) -> None: """Set new brightness to light.""" if brightness is None: # Level 255 means to set it to 
previous value. zwave_brightness = 255 else: # Zwave multilevel switches use a range of [0, 99] to control brightness. zwave_brightness = byte_to_zwave_brightness(brightness) # set transition value before sending new brightness await self._async_set_transition_duration(transition) # setting a value requires setting targetValue await self.info.node.async_set_value(self._target_value, zwave_brightness) async def _async_set_transition_duration(self, duration: int | None = None) -> None: """Set the transition time for the brightness value.""" if self._dimming_duration is None: return # pylint: disable=fixme,unreachable # TODO: setting duration needs to be fixed upstream # https://github.com/zwave-js/node-zwave-js/issues/1321 return if duration is None: # type: ignore # no transition specified by user, use defaults duration = 7621 # anything over 7620 uses the factory default else: # pragma: no cover # transition specified by user transition = duration if transition <= 127: duration = transition else: minutes = round(transition / 60) LOGGER.debug( "Transition rounded to %d minutes for %s", minutes, self.entity_id, ) duration = minutes + 128 # only send value if it differs from current # this prevents sending a command for nothing if self._dimming_duration.value != duration: # pragma: no cover await self.info.node.async_set_value(self._dimming_duration, duration) @callback def _calculate_color_values(self) -> None: """Calculate light colors.""" # NOTE: We lookup all values here (instead of relying on the multicolor one) # to find out what colors are supported # as this is a simple lookup by key, this not heavy red_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, value_property_key=ColorComponent.RED.value, ) green_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, value_property_key=ColorComponent.GREEN.value, ) blue_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, 
value_property_key=ColorComponent.BLUE.value, ) ww_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, value_property_key=ColorComponent.WARM_WHITE.value, ) cw_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, value_property_key=ColorComponent.COLD_WHITE.value, ) # prefer the (new) combined color property # https://github.com/zwave-js/node-zwave-js/pull/1782 combined_color_val = self.get_zwave_value( "currentColor", CommandClass.SWITCH_COLOR, value_property_key=None, ) if combined_color_val and isinstance(combined_color_val.value, dict): multi_color = combined_color_val.value else: multi_color = {} # RGB support if red_val and green_val and blue_val: # prefer values from the multicolor property red = multi_color.get("red", red_val.value) green = multi_color.get("green", green_val.value) blue = multi_color.get("blue", blue_val.value) self._supports_color = True # convert to HS self._hs_color = color_util.color_RGB_to_hs(red, green, blue) # color temperature support if ww_val and cw_val: self._supports_color_temp = True warm_white = multi_color.get("warmWhite", ww_val.value) cold_white = multi_color.get("coldWhite", cw_val.value) # Calculate color temps based on whites if cold_white or warm_white: self._color_temp = round( self._max_mireds - ((cold_white / 255) * (self._max_mireds - self._min_mireds)) ) else: self._color_temp = None # only one white channel (warm white) = white_level support elif ww_val: self._supports_white_value = True self._white_value = multi_color.get("warmWhite", ww_val.value) # only one white channel (cool white) = white_level support elif cw_val: self._supports_white_value = True self._white_value = multi_color.get("coldWhite", cw_val.value)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/zwave_js/light.py
"""Support for Acmeda Roller Blinds.""" from homeassistant.components.cover import ( ATTR_POSITION, SUPPORT_CLOSE, SUPPORT_CLOSE_TILT, SUPPORT_OPEN, SUPPORT_OPEN_TILT, SUPPORT_SET_POSITION, SUPPORT_SET_TILT_POSITION, SUPPORT_STOP, SUPPORT_STOP_TILT, CoverEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .base import AcmedaBase from .const import ACMEDA_HUB_UPDATE, DOMAIN from .helpers import async_add_acmeda_entities async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Acmeda Rollers from a config entry.""" hub = hass.data[DOMAIN][config_entry.entry_id] current = set() @callback def async_add_acmeda_covers(): async_add_acmeda_entities( hass, AcmedaCover, config_entry, current, async_add_entities ) hub.cleanup_callbacks.append( async_dispatcher_connect( hass, ACMEDA_HUB_UPDATE.format(config_entry.entry_id), async_add_acmeda_covers, ) ) class AcmedaCover(AcmedaBase, CoverEntity): """Representation of a Acmeda cover device.""" @property def current_cover_position(self): """Return the current position of the roller blind. None is unknown, 0 is closed, 100 is fully open. """ position = None if self.roller.type != 7: position = 100 - self.roller.closed_percent return position @property def current_cover_tilt_position(self): """Return the current tilt of the roller blind. None is unknown, 0 is closed, 100 is fully open. 
""" position = None if self.roller.type in [7, 10]: position = 100 - self.roller.closed_percent return position @property def supported_features(self): """Flag supported features.""" supported_features = 0 if self.current_cover_position is not None: supported_features |= ( SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP | SUPPORT_SET_POSITION ) if self.current_cover_tilt_position is not None: supported_features |= ( SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT | SUPPORT_SET_TILT_POSITION ) return supported_features @property def is_closed(self): """Return if the cover is closed.""" return self.roller.closed_percent == 100 async def async_close_cover(self, **kwargs): """Close the roller.""" await self.roller.move_down() async def async_open_cover(self, **kwargs): """Open the roller.""" await self.roller.move_up() async def async_stop_cover(self, **kwargs): """Stop the roller.""" await self.roller.move_stop() async def async_set_cover_position(self, **kwargs): """Move the roller shutter to a specific position.""" await self.roller.move_to(100 - kwargs[ATTR_POSITION]) async def async_close_cover_tilt(self, **kwargs): """Close the roller.""" await self.roller.move_down() async def async_open_cover_tilt(self, **kwargs): """Open the roller.""" await self.roller.move_up() async def async_stop_cover_tilt(self, **kwargs): """Stop the roller.""" await self.roller.move_stop() async def async_set_cover_tilt(self, **kwargs): """Tilt the roller shutter to a specific position.""" await self.roller.move_to(100 - kwargs[ATTR_POSITION])
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/acmeda/cover.py
"""Support for Nest Cameras.""" from datetime import timedelta import logging import requests from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_ON_OFF, Camera from homeassistant.util.dt import utcnow from .const import DATA_NEST, DOMAIN _LOGGER = logging.getLogger(__name__) NEST_BRAND = "Nest" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Nest Cam. No longer in use. """ async def async_setup_legacy_entry(hass, entry, async_add_entities): """Set up a Nest sensor based on a config entry.""" camera_devices = await hass.async_add_executor_job(hass.data[DATA_NEST].cameras) cameras = [NestCamera(structure, device) for structure, device in camera_devices] async_add_entities(cameras, True) class NestCamera(Camera): """Representation of a Nest Camera.""" def __init__(self, structure, device): """Initialize a Nest Camera.""" super().__init__() self.structure = structure self.device = device self._location = None self._name = None self._online = None self._is_streaming = None self._is_video_history_enabled = False # Default to non-NestAware subscribed, but will be fixed during update self._time_between_snapshots = timedelta(seconds=30) self._last_image = None self._next_snapshot_at = None @property def name(self): """Return the name of the nest, if any.""" return self._name @property def unique_id(self): """Return the serial number.""" return self.device.device_id @property def device_info(self): """Return information about the device.""" return { "identifiers": {(DOMAIN, self.device.device_id)}, "name": self.device.name_long, "manufacturer": "Nest Labs", "model": "Camera", } @property def should_poll(self): """Nest camera should poll periodically.""" return True @property def is_recording(self): """Return true if the device is recording.""" return self._is_streaming @property def brand(self): """Return the brand of the camera.""" return NEST_BRAND @property def 
supported_features(self): """Nest Cam support turn on and off.""" return SUPPORT_ON_OFF @property def is_on(self): """Return true if on.""" return self._online and self._is_streaming def turn_off(self): """Turn off camera.""" _LOGGER.debug("Turn off camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. self.device.is_streaming = False def turn_on(self): """Turn on camera.""" if not self._online: _LOGGER.error("Camera %s is offline", self._name) return _LOGGER.debug("Turn on camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. self.device.is_streaming = True def update(self): """Cache value from Python-nest.""" self._location = self.device.where self._name = self.device.name self._online = self.device.online self._is_streaming = self.device.is_streaming self._is_video_history_enabled = self.device.is_video_history_enabled if self._is_video_history_enabled: # NestAware allowed 10/min self._time_between_snapshots = timedelta(seconds=6) else: # Otherwise, 2/min self._time_between_snapshots = timedelta(seconds=30) def _ready_for_snapshot(self, now): return self._next_snapshot_at is None or now > self._next_snapshot_at def camera_image(self): """Return a still image response from the camera.""" now = utcnow() if self._ready_for_snapshot(now): url = self.device.snapshot_url try: response = requests.get(url) except requests.exceptions.RequestException as error: _LOGGER.error("Error getting camera image: %s", error) return None self._next_snapshot_at = now + self._time_between_snapshots self._last_image = response.content return self._last_image
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/nest/legacy/camera.py
"""KIRA interface to receive UDP packets from an IR-IP bridge.""" import logging import os import pykira import voluptuous as vol from voluptuous.error import Error as VoluptuousError import yaml from homeassistant.const import ( CONF_CODE, CONF_DEVICE, CONF_HOST, CONF_NAME, CONF_PORT, CONF_REPEAT, CONF_SENSORS, CONF_TYPE, EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN, ) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv DOMAIN = "kira" _LOGGER = logging.getLogger(__name__) DEFAULT_HOST = "0.0.0.0" DEFAULT_PORT = 65432 CONF_REMOTES = "remotes" CONF_SENSOR = "sensor" CONF_REMOTE = "remote" CODES_YAML = f"{DOMAIN}_codes.yaml" CODE_SCHEMA = vol.Schema( { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_CODE): cv.string, vol.Optional(CONF_TYPE): cv.string, vol.Optional(CONF_DEVICE): cv.string, vol.Optional(CONF_REPEAT): cv.positive_int, } ) SENSOR_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "sensors"), vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, } ) REMOTE_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "remotes"), vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_SENSORS): [SENSOR_SCHEMA], vol.Optional(CONF_REMOTES): [REMOTE_SCHEMA], } ) }, extra=vol.ALLOW_EXTRA, ) def load_codes(path): """Load KIRA codes from specified file.""" codes = [] if os.path.exists(path): with open(path) as code_file: data = yaml.safe_load(code_file) or [] for code in data: try: codes.append(CODE_SCHEMA(code)) except VoluptuousError as exception: # keep going _LOGGER.warning("KIRA code invalid data: %s", exception) else: with open(path, "w") as code_file: code_file.write("") return codes def setup(hass, config): """Set up the KIRA component.""" sensors = config.get(DOMAIN, 
{}).get(CONF_SENSORS, []) remotes = config.get(DOMAIN, {}).get(CONF_REMOTES, []) # If no sensors or remotes were specified, add a sensor if not (sensors or remotes): sensors.append({}) codes = load_codes(hass.config.path(CODES_YAML)) hass.data[DOMAIN] = {CONF_SENSOR: {}, CONF_REMOTE: {}} def load_module(platform, idx, module_conf): """Set up the KIRA module and load platform.""" # note: module_name is not the HA device name. it's just a unique name # to ensure the component and platform can share information module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN device_name = module_conf.get(CONF_NAME, DOMAIN) port = module_conf.get(CONF_PORT, DEFAULT_PORT) host = module_conf.get(CONF_HOST, DEFAULT_HOST) if platform == CONF_SENSOR: module = pykira.KiraReceiver(host, port) module.start() else: module = pykira.KiraModule(host, port) hass.data[DOMAIN][platform][module_name] = module for code in codes: code_tuple = (code.get(CONF_NAME), code.get(CONF_DEVICE, STATE_UNKNOWN)) module.registerCode(code_tuple, code.get(CONF_CODE)) discovery.load_platform( hass, platform, DOMAIN, {"name": module_name, "device": device_name}, config ) for idx, module_conf in enumerate(sensors): load_module(CONF_SENSOR, idx, module_conf) for idx, module_conf in enumerate(remotes): load_module(CONF_REMOTE, idx, module_conf) def _stop_kira(_event): """Stop the KIRA receiver.""" for receiver in hass.data[DOMAIN][CONF_SENSOR].values(): receiver.stop() _LOGGER.info("Terminated receivers") hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira) return True
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/kira/__init__.py
"""Errors for the cert_expiry integration.""" from homeassistant.exceptions import HomeAssistantError class CertExpiryException(HomeAssistantError): """Base class for cert_expiry exceptions.""" class TemporaryFailure(CertExpiryException): """Temporary failure has occurred.""" class ValidationFailure(CertExpiryException): """Certificate validation failure has occurred.""" class ResolveFailed(TemporaryFailure): """Name resolution failed.""" class ConnectionTimeout(TemporaryFailure): """Network connection timed out.""" class ConnectionRefused(TemporaryFailure): """Network connection refused."""
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/cert_expiry/errors.py
"""Config flow for UpCloud.""" import logging import requests.exceptions import upcloud_api import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME from homeassistant.core import callback from .const import DEFAULT_SCAN_INTERVAL, DOMAIN _LOGGER = logging.getLogger(__name__) class UpCloudConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """UpCloud config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL username: str password: str async def async_step_user(self, user_input=None): """Handle user initiated flow.""" if user_input is None: return self._async_show_form(step_id="user") await self.async_set_unique_id(user_input[CONF_USERNAME]) manager = upcloud_api.CloudManager( user_input[CONF_USERNAME], user_input[CONF_PASSWORD] ) errors = {} try: await self.hass.async_add_executor_job(manager.authenticate) except upcloud_api.UpCloudAPIError: errors["base"] = "invalid_auth" _LOGGER.debug("invalid_auth", exc_info=True) except requests.exceptions.RequestException: errors["base"] = "cannot_connect" _LOGGER.debug("cannot_connect", exc_info=True) if errors: return self._async_show_form( step_id="user", user_input=user_input, errors=errors ) return self.async_create_entry(title=user_input[CONF_USERNAME], data=user_input) async def async_step_import(self, user_input=None): """Handle import initiated flow.""" await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() return await self.async_step_user(user_input=user_input) @callback def _async_show_form(self, step_id, user_input=None, errors=None): """Show our form.""" if user_input is None: user_input = {} return self.async_show_form( step_id=step_id, data_schema=vol.Schema( { vol.Required( CONF_USERNAME, default=user_input.get(CONF_USERNAME, "") ): str, vol.Required( CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "") ): str, } ), errors=errors or {}, ) 
@staticmethod @callback def async_get_options_flow(config_entry): """Get options flow.""" return UpCloudOptionsFlow(config_entry) class UpCloudOptionsFlow(config_entries.OptionsFlow): """UpCloud options flow.""" def __init__(self, config_entry: config_entries.ConfigEntry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) data_schema = vol.Schema( { vol.Optional( CONF_SCAN_INTERVAL, default=self.config_entry.options.get(CONF_SCAN_INTERVAL) or DEFAULT_SCAN_INTERVAL.seconds, ): vol.All(vol.Coerce(int), vol.Range(min=30)), } ) return self.async_show_form(step_id="init", data_schema=data_schema)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/upcloud/config_flow.py
"""Support for Snips on-device ASR and NLU.""" from datetime import timedelta import json import logging import voluptuous as vol from homeassistant.components import mqtt from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, intent DOMAIN = "snips" CONF_INTENTS = "intents" CONF_ACTION = "action" CONF_FEEDBACK = "feedback_sounds" CONF_PROBABILITY = "probability_threshold" CONF_SITE_IDS = "site_ids" SERVICE_SAY = "say" SERVICE_SAY_ACTION = "say_action" SERVICE_FEEDBACK_ON = "feedback_on" SERVICE_FEEDBACK_OFF = "feedback_off" INTENT_TOPIC = "hermes/intent/#" FEEDBACK_ON_TOPIC = "hermes/feedback/sound/toggleOn" FEEDBACK_OFF_TOPIC = "hermes/feedback/sound/toggleOff" ATTR_TEXT = "text" ATTR_SITE_ID = "site_id" ATTR_CUSTOM_DATA = "custom_data" ATTR_CAN_BE_ENQUEUED = "can_be_enqueued" ATTR_INTENT_FILTER = "intent_filter" _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_FEEDBACK): cv.boolean, vol.Optional(CONF_PROBABILITY, default=0): vol.Coerce(float), vol.Optional(CONF_SITE_IDS, default=["default"]): vol.All( cv.ensure_list, [cv.string] ), } ) }, extra=vol.ALLOW_EXTRA, ) INTENT_SCHEMA = vol.Schema( { vol.Required("input"): str, vol.Required("intent"): {vol.Required("intentName"): str}, vol.Optional("slots"): [ { vol.Required("slotName"): str, vol.Required("value"): { vol.Required("kind"): str, vol.Optional("value"): cv.match_all, vol.Optional("rawValue"): cv.match_all, }, } ], }, extra=vol.ALLOW_EXTRA, ) SERVICE_SCHEMA_SAY = vol.Schema( { vol.Required(ATTR_TEXT): str, vol.Optional(ATTR_SITE_ID, default="default"): str, vol.Optional(ATTR_CUSTOM_DATA, default=""): str, } ) SERVICE_SCHEMA_SAY_ACTION = vol.Schema( { vol.Required(ATTR_TEXT): str, vol.Optional(ATTR_SITE_ID, default="default"): str, vol.Optional(ATTR_CUSTOM_DATA, default=""): str, vol.Optional(ATTR_CAN_BE_ENQUEUED, default=True): cv.boolean, vol.Optional(ATTR_INTENT_FILTER): vol.All(cv.ensure_list), } ) 
SERVICE_SCHEMA_FEEDBACK = vol.Schema( {vol.Optional(ATTR_SITE_ID, default="default"): str} ) async def async_setup(hass, config): """Activate Snips component.""" @callback def async_set_feedback(site_ids, state): """Set Feedback sound state.""" site_ids = site_ids if site_ids else config[DOMAIN].get(CONF_SITE_IDS) topic = FEEDBACK_ON_TOPIC if state else FEEDBACK_OFF_TOPIC for site_id in site_ids: payload = json.dumps({"siteId": site_id}) hass.components.mqtt.async_publish( FEEDBACK_ON_TOPIC, "", qos=0, retain=False ) hass.components.mqtt.async_publish( topic, payload, qos=int(state), retain=state ) if CONF_FEEDBACK in config[DOMAIN]: async_set_feedback(None, config[DOMAIN][CONF_FEEDBACK]) async def message_received(msg): """Handle new messages on MQTT.""" _LOGGER.debug("New intent: %s", msg.payload) try: request = json.loads(msg.payload) except TypeError: _LOGGER.error("Received invalid JSON: %s", msg.payload) return if request["intent"]["confidenceScore"] < config[DOMAIN].get(CONF_PROBABILITY): _LOGGER.warning( "Intent below probaility threshold %s < %s", request["intent"]["confidenceScore"], config[DOMAIN].get(CONF_PROBABILITY), ) return try: request = INTENT_SCHEMA(request) except vol.Invalid as err: _LOGGER.error("Intent has invalid schema: %s. 
%s", err, request) return if request["intent"]["intentName"].startswith("user_"): intent_type = request["intent"]["intentName"].split("__")[-1] else: intent_type = request["intent"]["intentName"].split(":")[-1] slots = {} for slot in request.get("slots", []): slots[slot["slotName"]] = {"value": resolve_slot_values(slot)} slots["{}_raw".format(slot["slotName"])] = {"value": slot["rawValue"]} slots["site_id"] = {"value": request.get("siteId")} slots["session_id"] = {"value": request.get("sessionId")} slots["confidenceScore"] = {"value": request["intent"]["confidenceScore"]} try: intent_response = await intent.async_handle( hass, DOMAIN, intent_type, slots, request["input"] ) notification = {"sessionId": request.get("sessionId", "default")} if "plain" in intent_response.speech: notification["text"] = intent_response.speech["plain"]["speech"] _LOGGER.debug("send_response %s", json.dumps(notification)) mqtt.async_publish( hass, "hermes/dialogueManager/endSession", json.dumps(notification) ) except intent.UnknownIntent: _LOGGER.warning( "Received unknown intent %s", request["intent"]["intentName"] ) except intent.IntentError: _LOGGER.exception("Error while handling intent: %s", intent_type) await hass.components.mqtt.async_subscribe(INTENT_TOPIC, message_received) async def snips_say(call): """Send a Snips notification message.""" notification = { "siteId": call.data.get(ATTR_SITE_ID, "default"), "customData": call.data.get(ATTR_CUSTOM_DATA, ""), "init": {"type": "notification", "text": call.data.get(ATTR_TEXT)}, } mqtt.async_publish( hass, "hermes/dialogueManager/startSession", json.dumps(notification) ) return async def snips_say_action(call): """Send a Snips action message.""" notification = { "siteId": call.data.get(ATTR_SITE_ID, "default"), "customData": call.data.get(ATTR_CUSTOM_DATA, ""), "init": { "type": "action", "text": call.data.get(ATTR_TEXT), "canBeEnqueued": call.data.get(ATTR_CAN_BE_ENQUEUED, True), "intentFilter": call.data.get(ATTR_INTENT_FILTER, []), 
}, } mqtt.async_publish( hass, "hermes/dialogueManager/startSession", json.dumps(notification) ) return async def feedback_on(call): """Turn feedback sounds on.""" async_set_feedback(call.data.get(ATTR_SITE_ID), True) async def feedback_off(call): """Turn feedback sounds off.""" async_set_feedback(call.data.get(ATTR_SITE_ID), False) hass.services.async_register( DOMAIN, SERVICE_SAY, snips_say, schema=SERVICE_SCHEMA_SAY ) hass.services.async_register( DOMAIN, SERVICE_SAY_ACTION, snips_say_action, schema=SERVICE_SCHEMA_SAY_ACTION ) hass.services.async_register( DOMAIN, SERVICE_FEEDBACK_ON, feedback_on, schema=SERVICE_SCHEMA_FEEDBACK ) hass.services.async_register( DOMAIN, SERVICE_FEEDBACK_OFF, feedback_off, schema=SERVICE_SCHEMA_FEEDBACK ) return True def resolve_slot_values(slot): """Convert snips builtin types to usable values.""" if "value" in slot["value"]: value = slot["value"]["value"] else: value = slot["rawValue"] if slot.get("entity") == "snips/duration": delta = timedelta( weeks=slot["value"]["weeks"], days=slot["value"]["days"], hours=slot["value"]["hours"], minutes=slot["value"]["minutes"], seconds=slot["value"]["seconds"], ) value = delta.seconds return value
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/snips/__init__.py
"""Config flow for Network UPS Tools (NUT) integration.""" import logging import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import ( CONF_ALIAS, CONF_BASE, CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_RESOURCES, CONF_SCAN_INTERVAL, CONF_USERNAME, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from . import PyNUTData, find_resources_in_config_entry from .const import ( DEFAULT_HOST, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DOMAIN, KEY_STATUS, KEY_STATUS_DISPLAY, SENSOR_NAME, SENSOR_TYPES, ) _LOGGER = logging.getLogger(__name__) SENSOR_DICT = { sensor_id: sensor_spec[SENSOR_NAME] for sensor_id, sensor_spec in SENSOR_TYPES.items() } def _base_schema(discovery_info): """Generate base schema.""" base_schema = {} if not discovery_info: base_schema.update( { vol.Optional(CONF_HOST, default=DEFAULT_HOST): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, } ) base_schema.update( {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} ) return vol.Schema(base_schema) def _resource_schema_base(available_resources, selected_resources): """Resource selection schema.""" known_available_resources = { sensor_id: sensor[SENSOR_NAME] for sensor_id, sensor in SENSOR_TYPES.items() if sensor_id in available_resources } if KEY_STATUS in known_available_resources: known_available_resources[KEY_STATUS_DISPLAY] = SENSOR_TYPES[ KEY_STATUS_DISPLAY ][SENSOR_NAME] return { vol.Required(CONF_RESOURCES, default=selected_resources): cv.multi_select( known_available_resources ) } def _ups_schema(ups_list): """UPS selection schema.""" return vol.Schema({vol.Required(CONF_ALIAS): vol.In(ups_list)}) async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from _base_schema with values provided by the user. 
""" host = data[CONF_HOST] port = data[CONF_PORT] alias = data.get(CONF_ALIAS) username = data.get(CONF_USERNAME) password = data.get(CONF_PASSWORD) data = PyNUTData(host, port, alias, username, password) await hass.async_add_executor_job(data.update) status = data.status if not status: raise CannotConnect return {"ups_list": data.ups_list, "available_resources": status} def _format_host_port_alias(user_input): """Format a host, port, and alias so it can be used for comparison or display.""" host = user_input[CONF_HOST] port = user_input[CONF_PORT] alias = user_input.get(CONF_ALIAS) if alias: return f"{alias}@{host}:{port}" return f"{host}:{port}" class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Network UPS Tools (NUT).""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the nut config flow.""" self.nut_config = {} self.available_resources = {} self.discovery_info = {} self.ups_list = None self.title = None async def async_step_zeroconf(self, discovery_info): """Prepare configuration for a discovered nut device.""" self.discovery_info = discovery_info await self._async_handle_discovery_without_unique_id() self.context["title_placeholders"] = { CONF_PORT: discovery_info.get(CONF_PORT, DEFAULT_PORT), CONF_HOST: discovery_info[CONF_HOST], } return await self.async_step_user() async def async_step_user(self, user_input=None): """Handle the user input.""" errors = {} if user_input is not None: if self.discovery_info: user_input.update( { CONF_HOST: self.discovery_info[CONF_HOST], CONF_PORT: self.discovery_info.get(CONF_PORT, DEFAULT_PORT), } ) info, errors = await self._async_validate_or_error(user_input) if not errors: self.nut_config.update(user_input) if len(info["ups_list"]) > 1: self.ups_list = info["ups_list"] return await self.async_step_ups() if self._host_port_alias_already_configured(self.nut_config): return self.async_abort(reason="already_configured") 
self.available_resources.update(info["available_resources"]) return await self.async_step_resources() return self.async_show_form( step_id="user", data_schema=_base_schema(self.discovery_info), errors=errors ) async def async_step_ups(self, user_input=None): """Handle the picking the ups.""" errors = {} if user_input is not None: self.nut_config.update(user_input) if self._host_port_alias_already_configured(self.nut_config): return self.async_abort(reason="already_configured") info, errors = await self._async_validate_or_error(self.nut_config) if not errors: self.available_resources.update(info["available_resources"]) return await self.async_step_resources() return self.async_show_form( step_id="ups", data_schema=_ups_schema(self.ups_list), errors=errors, ) async def async_step_resources(self, user_input=None): """Handle the picking the resources.""" if user_input is None: return self.async_show_form( step_id="resources", data_schema=vol.Schema( _resource_schema_base(self.available_resources, []) ), ) self.nut_config.update(user_input) title = _format_host_port_alias(self.nut_config) return self.async_create_entry(title=title, data=self.nut_config) def _host_port_alias_already_configured(self, user_input): """See if we already have a nut entry matching user input configured.""" existing_host_port_aliases = { _format_host_port_alias(entry.data) for entry in self._async_current_entries() if CONF_HOST in entry.data } return _format_host_port_alias(user_input) in existing_host_port_aliases async def _async_validate_or_error(self, config): errors = {} info = {} try: info = await validate_input(self.hass, config) except CannotConnect: errors[CONF_BASE] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors[CONF_BASE] = "unknown" return info, errors @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) class 
OptionsFlowHandler(config_entries.OptionsFlow): """Handle a option flow for nut.""" def __init__(self, config_entry: config_entries.ConfigEntry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) resources = find_resources_in_config_entry(self.config_entry) scan_interval = self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ) errors = {} try: info = await validate_input(self.hass, self.config_entry.data) except CannotConnect: errors[CONF_BASE] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors[CONF_BASE] = "unknown" if errors: return self.async_show_form(step_id="abort", errors=errors) base_schema = _resource_schema_base(info["available_resources"], resources) base_schema[ vol.Optional(CONF_SCAN_INTERVAL, default=scan_interval) ] = cv.positive_int return self.async_show_form( step_id="init", data_schema=vol.Schema(base_schema), errors=errors ) async def async_step_abort(self, user_input=None): """Abort options flow.""" return self.async_create_entry(title="", data=self.config_entry.options) class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect."""
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/nut/config_flow.py
"""The fritzbox_callmonitor integration.""" from asyncio import gather import logging from fritzconnection.core.exceptions import FritzConnectionException, FritzSecurityError from requests.exceptions import ConnectionError as RequestsConnectionError from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.exceptions import ConfigEntryNotReady from .base import FritzBoxPhonebook from .const import ( CONF_PHONEBOOK, CONF_PREFIXES, DOMAIN, FRITZBOX_PHONEBOOK, PLATFORMS, UNDO_UPDATE_LISTENER, ) _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry): """Set up the fritzbox_callmonitor platforms.""" fritzbox_phonebook = FritzBoxPhonebook( host=config_entry.data[CONF_HOST], username=config_entry.data[CONF_USERNAME], password=config_entry.data[CONF_PASSWORD], phonebook_id=config_entry.data[CONF_PHONEBOOK], prefixes=config_entry.options.get(CONF_PREFIXES), ) try: await hass.async_add_executor_job(fritzbox_phonebook.init_phonebook) except FritzSecurityError as ex: _LOGGER.error( "User has insufficient permissions to access AVM FRITZ!Box settings and its phonebooks: %s", ex, ) return False except FritzConnectionException as ex: _LOGGER.error("Invalid authentication: %s", ex) return False except RequestsConnectionError as ex: _LOGGER.error("Unable to connect to AVM FRITZ!Box call monitor: %s", ex) raise ConfigEntryNotReady from ex undo_listener = config_entry.add_update_listener(update_listener) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = { FRITZBOX_PHONEBOOK: fritzbox_phonebook, UNDO_UPDATE_LISTENER: undo_listener, } for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, platform) ) return True async def async_unload_entry(hass, config_entry): """Unloading the fritzbox_callmonitor platforms.""" unload_ok = all( await gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in PLATFORMS ] 
) ) hass.data[DOMAIN][config_entry.entry_id][UNDO_UPDATE_LISTENER]() if unload_ok: hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok async def update_listener(hass, config_entry): """Update listener to reload after option has changed.""" await hass.config_entries.async_reload(config_entry.entry_id)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/fritzbox_callmonitor/__init__.py
"""Support for Axis binary sensors.""" from datetime import timedelta from axis.event_stream import ( CLASS_INPUT, CLASS_LIGHT, CLASS_MOTION, CLASS_OUTPUT, CLASS_PTZ, CLASS_SOUND, FenceGuard, LoiteringGuard, MotionGuard, ObjectAnalytics, Vmd4, ) from homeassistant.components.binary_sensor import ( DEVICE_CLASS_CONNECTIVITY, DEVICE_CLASS_LIGHT, DEVICE_CLASS_MOTION, DEVICE_CLASS_SOUND, BinarySensorEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util.dt import utcnow from .axis_base import AxisEventBase from .const import DOMAIN as AXIS_DOMAIN DEVICE_CLASS = { CLASS_INPUT: DEVICE_CLASS_CONNECTIVITY, CLASS_LIGHT: DEVICE_CLASS_LIGHT, CLASS_MOTION: DEVICE_CLASS_MOTION, CLASS_SOUND: DEVICE_CLASS_SOUND, } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up a Axis binary sensor.""" device = hass.data[AXIS_DOMAIN][config_entry.unique_id] @callback def async_add_sensor(event_id): """Add binary sensor from Axis device.""" event = device.api.event[event_id] if event.CLASS not in (CLASS_OUTPUT, CLASS_PTZ) and not ( event.CLASS == CLASS_LIGHT and event.TYPE == "Light" ): async_add_entities([AxisBinarySensor(event, device)]) device.listeners.append( async_dispatcher_connect(hass, device.signal_new_event, async_add_sensor) ) class AxisBinarySensor(AxisEventBase, BinarySensorEntity): """Representation of a binary Axis event.""" def __init__(self, event, device): """Initialize the Axis binary sensor.""" super().__init__(event, device) self.cancel_scheduled_update = None @callback def update_callback(self, no_delay=False): """Update the sensor's state, if needed. Parameter no_delay is True when device_event_reachable is sent. 
""" @callback def scheduled_update(now): """Timer callback for sensor update.""" self.cancel_scheduled_update = None self.async_write_ha_state() if self.cancel_scheduled_update is not None: self.cancel_scheduled_update() self.cancel_scheduled_update = None if self.is_on or self.device.option_trigger_time == 0 or no_delay: self.async_write_ha_state() return self.cancel_scheduled_update = async_track_point_in_utc_time( self.hass, scheduled_update, utcnow() + timedelta(seconds=self.device.option_trigger_time), ) @property def is_on(self): """Return true if event is active.""" return self.event.is_tripped @property def name(self): """Return the name of the event.""" if ( self.event.CLASS == CLASS_INPUT and self.event.id in self.device.api.vapix.ports and self.device.api.vapix.ports[self.event.id].name ): return ( f"{self.device.name} {self.device.api.vapix.ports[self.event.id].name}" ) if self.event.CLASS == CLASS_MOTION: for event_class, event_data in ( (FenceGuard, self.device.api.vapix.fence_guard), (LoiteringGuard, self.device.api.vapix.loitering_guard), (MotionGuard, self.device.api.vapix.motion_guard), (ObjectAnalytics, self.device.api.vapix.object_analytics), (Vmd4, self.device.api.vapix.vmd4), ): if ( isinstance(self.event, event_class) and event_data and self.event.id in event_data ): return f"{self.device.name} {self.event.TYPE} {event_data[self.event.id].name}" return super().name @property def device_class(self): """Return the class of the sensor.""" return DEVICE_CLASS.get(self.event.CLASS)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/axis/binary_sensor.py
"""Support for Axis lights.""" from axis.event_stream import CLASS_LIGHT from homeassistant.components.light import ( ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, LightEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .axis_base import AxisEventBase from .const import DOMAIN as AXIS_DOMAIN async def async_setup_entry(hass, config_entry, async_add_entities): """Set up a Axis light.""" device = hass.data[AXIS_DOMAIN][config_entry.unique_id] if ( device.api.vapix.light_control is None or len(device.api.vapix.light_control) == 0 ): return @callback def async_add_sensor(event_id): """Add light from Axis device.""" event = device.api.event[event_id] if event.CLASS == CLASS_LIGHT and event.TYPE == "Light": async_add_entities([AxisLight(event, device)]) device.listeners.append( async_dispatcher_connect(hass, device.signal_new_event, async_add_sensor) ) class AxisLight(AxisEventBase, LightEntity): """Representation of a light Axis event.""" def __init__(self, event, device): """Initialize the Axis light.""" super().__init__(event, device) self.light_id = f"led{self.event.id}" self.current_intensity = 0 self.max_intensity = 0 self._features = SUPPORT_BRIGHTNESS async def async_added_to_hass(self) -> None: """Subscribe lights events.""" await super().async_added_to_hass() current_intensity = ( await self.device.api.vapix.light_control.get_current_intensity( self.light_id ) ) self.current_intensity = current_intensity["data"]["intensity"] max_intensity = await self.device.api.vapix.light_control.get_valid_intensity( self.light_id ) self.max_intensity = max_intensity["data"]["ranges"][0]["high"] @property def supported_features(self): """Flag supported features.""" return self._features @property def name(self): """Return the name of the light.""" light_type = self.device.api.vapix.light_control[self.light_id].light_type return f"{self.device.name} {light_type} {self.event.TYPE} {self.event.id}" @property def 
is_on(self): """Return true if light is on.""" return self.event.is_tripped @property def brightness(self): """Return the brightness of this light between 0..255.""" return int((self.current_intensity / self.max_intensity) * 255) async def async_turn_on(self, **kwargs): """Turn on light.""" if not self.is_on: await self.device.api.vapix.light_control.activate_light(self.light_id) if ATTR_BRIGHTNESS in kwargs: intensity = int((kwargs[ATTR_BRIGHTNESS] / 255) * self.max_intensity) await self.device.api.vapix.light_control.set_manual_intensity( self.light_id, intensity ) async def async_turn_off(self, **kwargs): """Turn off light.""" if self.is_on: await self.device.api.vapix.light_control.deactivate_light(self.light_id) async def async_update(self): """Update brightness.""" current_intensity = ( await self.device.api.vapix.light_control.get_current_intensity( self.light_id ) ) self.current_intensity = current_intensity["data"]["intensity"] @property def should_poll(self): """Brightness needs polling.""" return True
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/axis/light.py
"""The Keenetic Client class.""" import logging from homeassistant.components.binary_sensor import ( DEVICE_CLASS_CONNECTIVITY, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import KeeneticRouter from .const import DOMAIN, ROUTER _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities ): """Set up device tracker for Keenetic NDMS2 component.""" router: KeeneticRouter = hass.data[DOMAIN][config_entry.entry_id][ROUTER] async_add_entities([RouterOnlineBinarySensor(router)]) class RouterOnlineBinarySensor(BinarySensorEntity): """Representation router connection status.""" def __init__(self, router: KeeneticRouter): """Initialize the APCUPSd binary device.""" self._router = router @property def name(self): """Return the name of the online status sensor.""" return f"{self._router.name} Online" @property def unique_id(self) -> str: """Return a unique identifier for this device.""" return f"online_{self._router.config_entry.entry_id}" @property def is_on(self): """Return true if the UPS is online, else false.""" return self._router.available @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_CONNECTIVITY @property def should_poll(self) -> bool: """Return False since entity pushes its state to HA.""" return False @property def device_info(self): """Return a client description for device registry.""" return self._router.device_info async def async_added_to_hass(self): """Client entity created.""" self.async_on_remove( async_dispatcher_connect( self.hass, self._router.signal_update, self.async_write_ha_state, ) )
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/keenetic_ndms2/binary_sensor.py
"""Handler for Hass.io.""" import asyncio import logging import os import aiohttp from homeassistant.components.http import ( CONF_SERVER_HOST, CONF_SERVER_PORT, CONF_SSL_CERTIFICATE, ) from homeassistant.const import HTTP_BAD_REQUEST, HTTP_OK, SERVER_PORT from .const import X_HASSIO _LOGGER = logging.getLogger(__name__) class HassioAPIError(RuntimeError): """Return if a API trow a error.""" def _api_bool(funct): """Return a boolean.""" async def _wrapper(*argv, **kwargs): """Wrap function.""" try: data = await funct(*argv, **kwargs) return data["result"] == "ok" except HassioAPIError: return False return _wrapper def api_data(funct): """Return data of an api.""" async def _wrapper(*argv, **kwargs): """Wrap function.""" data = await funct(*argv, **kwargs) if data["result"] == "ok": return data["data"] raise HassioAPIError(data["message"]) return _wrapper class HassIO: """Small API wrapper for Hass.io.""" def __init__( self, loop: asyncio.AbstractEventLoop, websession: aiohttp.ClientSession, ip: str, ) -> None: """Initialize Hass.io API.""" self.loop = loop self.websession = websession self._ip = ip @_api_bool def is_connected(self): """Return true if it connected to Hass.io supervisor. This method return a coroutine. """ return self.send_command("/supervisor/ping", method="get", timeout=15) @api_data def get_info(self): """Return generic Supervisor information. This method return a coroutine. """ return self.send_command("/info", method="get") @api_data def get_host_info(self): """Return data for Host. This method return a coroutine. """ return self.send_command("/host/info", method="get") @api_data def get_os_info(self): """Return data for the OS. This method return a coroutine. """ return self.send_command("/os/info", method="get") @api_data def get_core_info(self): """Return data for Home Asssistant Core. This method returns a coroutine. 
""" return self.send_command("/core/info", method="get") @api_data def get_supervisor_info(self): """Return data for the Supervisor. This method returns a coroutine. """ return self.send_command("/supervisor/info", method="get") @api_data def get_addon_info(self, addon): """Return data for a Add-on. This method return a coroutine. """ return self.send_command(f"/addons/{addon}/info", method="get") @api_data def get_ingress_panels(self): """Return data for Add-on ingress panels. This method return a coroutine. """ return self.send_command("/ingress/panels", method="get") @_api_bool def restart_homeassistant(self): """Restart Home-Assistant container. This method return a coroutine. """ return self.send_command("/homeassistant/restart") @_api_bool def stop_homeassistant(self): """Stop Home-Assistant container. This method return a coroutine. """ return self.send_command("/homeassistant/stop") @api_data def retrieve_discovery_messages(self): """Return all discovery data from Hass.io API. This method return a coroutine. """ return self.send_command("/discovery", method="get", timeout=60) @api_data def get_discovery_message(self, uuid): """Return a single discovery data message. This method return a coroutine. """ return self.send_command(f"/discovery/{uuid}", method="get") @_api_bool async def update_hass_api(self, http_config, refresh_token): """Update Home Assistant API data on Hass.io.""" port = http_config.get(CONF_SERVER_PORT) or SERVER_PORT options = { "ssl": CONF_SSL_CERTIFICATE in http_config, "port": port, "watchdog": True, "refresh_token": refresh_token.token, } if http_config.get(CONF_SERVER_HOST) is not None: options["watchdog"] = False _LOGGER.warning( "Found incompatible HTTP option 'server_host'. Watchdog feature disabled" ) return await self.send_command("/homeassistant/options", payload=options) @_api_bool def update_hass_timezone(self, timezone): """Update Home-Assistant timezone data on Hass.io. This method return a coroutine. 
""" return self.send_command("/supervisor/options", payload={"timezone": timezone}) @_api_bool def update_diagnostics(self, diagnostics: bool): """Update Supervisor diagnostics setting. This method return a coroutine. """ return self.send_command( "/supervisor/options", payload={"diagnostics": diagnostics} ) async def send_command(self, command, method="post", payload=None, timeout=10): """Send API command to Hass.io. This method is a coroutine. """ try: request = await self.websession.request( method, f"http://{self._ip}{command}", json=payload, headers={X_HASSIO: os.environ.get("HASSIO_TOKEN", "")}, timeout=aiohttp.ClientTimeout(total=timeout), ) if request.status not in (HTTP_OK, HTTP_BAD_REQUEST): _LOGGER.error("%s return code %d", command, request.status) raise HassioAPIError() answer = await request.json() return answer except asyncio.TimeoutError: _LOGGER.error("Timeout on %s request", command) except aiohttp.ClientError as err: _LOGGER.error("Client error on %s request %s", command, err) raise HassioAPIError()
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/hassio/handler.py
"""Support to select an option from a list.""" from __future__ import annotations import logging import voluptuous as vol from homeassistant.const import ( ATTR_EDITABLE, ATTR_OPTION, CONF_ICON, CONF_ID, CONF_NAME, SERVICE_RELOAD, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import collection import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.helpers.service from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType, ServiceCallType _LOGGER = logging.getLogger(__name__) DOMAIN = "input_select" CONF_INITIAL = "initial" CONF_OPTIONS = "options" ATTR_OPTIONS = "options" ATTR_CYCLE = "cycle" SERVICE_SELECT_OPTION = "select_option" SERVICE_SELECT_NEXT = "select_next" SERVICE_SELECT_PREVIOUS = "select_previous" SERVICE_SELECT_FIRST = "select_first" SERVICE_SELECT_LAST = "select_last" SERVICE_SET_OPTIONS = "set_options" STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 CREATE_FIELDS = { vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)), vol.Required(CONF_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]), vol.Optional(CONF_INITIAL): cv.string, vol.Optional(CONF_ICON): cv.icon, } UPDATE_FIELDS = { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_OPTIONS): vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]), vol.Optional(CONF_INITIAL): cv.string, vol.Optional(CONF_ICON): cv.icon, } def _cv_input_select(cfg): """Configure validation helper for input select (voluptuous).""" options = cfg[CONF_OPTIONS] initial = cfg.get(CONF_INITIAL) if initial is not None and initial not in options: raise vol.Invalid( f"initial state {initial} is not part of the options: {','.join(options)}" ) return cfg CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( vol.All( { vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_OPTIONS): 
vol.All( cv.ensure_list, vol.Length(min=1), [cv.string] ), vol.Optional(CONF_INITIAL): cv.string, vol.Optional(CONF_ICON): cv.icon, }, _cv_input_select, ) ) }, extra=vol.ALLOW_EXTRA, ) RELOAD_SERVICE_SCHEMA = vol.Schema({}) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up an input select.""" component = EntityComponent(_LOGGER, DOMAIN, hass) id_manager = collection.IDManager() yaml_collection = collection.YamlCollection( logging.getLogger(f"{__name__}.yaml_collection"), id_manager ) collection.sync_entity_lifecycle( hass, DOMAIN, DOMAIN, component, yaml_collection, InputSelect.from_yaml ) storage_collection = InputSelectStorageCollection( Store(hass, STORAGE_VERSION, STORAGE_KEY), logging.getLogger(f"{__name__}.storage_collection"), id_manager, ) collection.sync_entity_lifecycle( hass, DOMAIN, DOMAIN, component, storage_collection, InputSelect ) await yaml_collection.async_load( [{CONF_ID: id_, **cfg} for id_, cfg in config.get(DOMAIN, {}).items()] ) await storage_collection.async_load() collection.StorageCollectionWebsocket( storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS ).async_setup(hass) async def reload_service_handler(service_call: ServiceCallType) -> None: """Reload yaml entities.""" conf = await component.async_prepare_reload(skip_reset=True) if conf is None: conf = {DOMAIN: {}} await yaml_collection.async_load( [{CONF_ID: id_, **cfg} for id_, cfg in conf.get(DOMAIN, {}).items()] ) homeassistant.helpers.service.async_register_admin_service( hass, DOMAIN, SERVICE_RELOAD, reload_service_handler, schema=RELOAD_SERVICE_SCHEMA, ) component.async_register_entity_service( SERVICE_SELECT_OPTION, {vol.Required(ATTR_OPTION): cv.string}, "async_select_option", ) component.async_register_entity_service( SERVICE_SELECT_NEXT, {vol.Optional(ATTR_CYCLE, default=True): bool}, "async_next", ) component.async_register_entity_service( SERVICE_SELECT_PREVIOUS, {vol.Optional(ATTR_CYCLE, default=True): bool}, "async_previous", ) 
component.async_register_entity_service( SERVICE_SELECT_FIRST, {}, callback(lambda entity, call: entity.async_select_index(0)), ) component.async_register_entity_service( SERVICE_SELECT_LAST, {}, callback(lambda entity, call: entity.async_select_index(-1)), ) component.async_register_entity_service( SERVICE_SET_OPTIONS, { vol.Required(ATTR_OPTIONS): vol.All( cv.ensure_list, vol.Length(min=1), [cv.string] ) }, "async_set_options", ) return True class InputSelectStorageCollection(collection.StorageCollection): """Input storage based collection.""" CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_select)) UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS) async def _process_create_data(self, data: dict) -> dict: """Validate the config is valid.""" return self.CREATE_SCHEMA(data) @callback def _get_suggested_id(self, info: dict) -> str: """Suggest an ID based on the config.""" return info[CONF_NAME] async def _update_data(self, data: dict, update_data: dict) -> dict: """Return a new updated data object.""" update_data = self.UPDATE_SCHEMA(update_data) return _cv_input_select({**data, **update_data}) class InputSelect(RestoreEntity): """Representation of a select input.""" def __init__(self, config: dict): """Initialize a select input.""" self._config = config self.editable = True self._current_option = config.get(CONF_INITIAL) @classmethod def from_yaml(cls, config: dict) -> InputSelect: """Return entity instance initialized from yaml storage.""" input_select = cls(config) input_select.entity_id = f"{DOMAIN}.{config[CONF_ID]}" input_select.editable = False return input_select async def async_added_to_hass(self): """Run when entity about to be added.""" await super().async_added_to_hass() if self._current_option is not None: return state = await self.async_get_last_state() if not state or state.state not in self._options: self._current_option = self._options[0] else: self._current_option = state.state @property def should_poll(self): """If entity should be polled.""" 
return False @property def name(self): """Return the name of the select input.""" return self._config.get(CONF_NAME) @property def icon(self): """Return the icon to be used for this entity.""" return self._config.get(CONF_ICON) @property def _options(self) -> list[str]: """Return a list of selection options.""" return self._config[CONF_OPTIONS] @property def state(self): """Return the state of the component.""" return self._current_option @property def extra_state_attributes(self): """Return the state attributes.""" return {ATTR_OPTIONS: self._config[ATTR_OPTIONS], ATTR_EDITABLE: self.editable} @property def unique_id(self) -> str | None: """Return unique id for the entity.""" return self._config[CONF_ID] @callback def async_select_option(self, option): """Select new option.""" if option not in self._options: _LOGGER.warning( "Invalid option: %s (possible options: %s)", option, ", ".join(self._options), ) return self._current_option = option self.async_write_ha_state() @callback def async_select_index(self, idx): """Select new option by index.""" new_index = idx % len(self._options) self._current_option = self._options[new_index] self.async_write_ha_state() @callback def async_offset_index(self, offset, cycle): """Offset current index.""" current_index = self._options.index(self._current_option) new_index = current_index + offset if cycle: new_index = new_index % len(self._options) else: if new_index < 0: new_index = 0 elif new_index >= len(self._options): new_index = len(self._options) - 1 self._current_option = self._options[new_index] self.async_write_ha_state() @callback def async_next(self, cycle): """Select next option.""" self.async_offset_index(1, cycle) @callback def async_previous(self, cycle): """Select previous option.""" self.async_offset_index(-1, cycle) @callback def async_set_options(self, options): """Set options.""" self._current_option = options[0] self._config[CONF_OPTIONS] = options self.async_write_ha_state() async def 
async_update_config(self, config: dict) -> None: """Handle when the config is updated.""" self._config = config self.async_write_ha_state()
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/input_select/__init__.py
"""BleBox climate entity.""" from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( CURRENT_HVAC_HEAT, CURRENT_HVAC_IDLE, CURRENT_HVAC_OFF, HVAC_MODE_HEAT, HVAC_MODE_OFF, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS from . import BleBoxEntity, create_blebox_entities async def async_setup_entry(hass, config_entry, async_add_entities): """Set up a BleBox climate entity.""" create_blebox_entities( hass, config_entry, async_add_entities, BleBoxClimateEntity, "climates" ) class BleBoxClimateEntity(BleBoxEntity, ClimateEntity): """Representation of a BleBox climate feature (saunaBox).""" @property def supported_features(self): """Return the supported climate features.""" return SUPPORT_TARGET_TEMPERATURE @property def hvac_mode(self): """Return the desired HVAC mode.""" if self._feature.is_on is None: return None return HVAC_MODE_HEAT if self._feature.is_on else HVAC_MODE_OFF @property def hvac_action(self): """Return the actual current HVAC action.""" is_on = self._feature.is_on if not is_on: return None if is_on is None else CURRENT_HVAC_OFF # NOTE: In practice, there's no need to handle case when is_heating is None return CURRENT_HVAC_HEAT if self._feature.is_heating else CURRENT_HVAC_IDLE @property def hvac_modes(self): """Return a list of possible HVAC modes.""" return [HVAC_MODE_OFF, HVAC_MODE_HEAT] @property def temperature_unit(self): """Return the temperature unit.""" return TEMP_CELSIUS @property def max_temp(self): """Return the maximum temperature supported.""" return self._feature.max_temp @property def min_temp(self): """Return the maximum temperature supported.""" return self._feature.min_temp @property def current_temperature(self): """Return the current temperature.""" return self._feature.current @property def target_temperature(self): """Return the desired thermostat temperature.""" return self._feature.desired async def async_set_hvac_mode(self, 
hvac_mode): """Set the climate entity mode.""" if hvac_mode == HVAC_MODE_HEAT: await self._feature.async_on() return await self._feature.async_off() async def async_set_temperature(self, **kwargs): """Set the thermostat temperature.""" value = kwargs[ATTR_TEMPERATURE] await self._feature.async_set_temperature(value)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/blebox/climate.py
"""Component to embed TP-Link smart home devices.""" import logging import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_HOST import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, HomeAssistantType from .common import ( ATTR_CONFIG, CONF_DIMMER, CONF_DISCOVERY, CONF_LIGHT, CONF_STRIP, CONF_SWITCH, SmartDevices, async_discover_devices, get_static_devices, ) _LOGGER = logging.getLogger(__name__) DOMAIN = "tplink" TPLINK_HOST_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string}) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_LIGHT, default=[]): vol.All( cv.ensure_list, [TPLINK_HOST_SCHEMA] ), vol.Optional(CONF_SWITCH, default=[]): vol.All( cv.ensure_list, [TPLINK_HOST_SCHEMA] ), vol.Optional(CONF_STRIP, default=[]): vol.All( cv.ensure_list, [TPLINK_HOST_SCHEMA] ), vol.Optional(CONF_DIMMER, default=[]): vol.All( cv.ensure_list, [TPLINK_HOST_SCHEMA] ), vol.Optional(CONF_DISCOVERY, default=True): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the TP-Link component.""" conf = config.get(DOMAIN) hass.data[DOMAIN] = {} hass.data[DOMAIN][ATTR_CONFIG] = conf if conf is not None: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT} ) ) return True async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigType): """Set up TPLink from a config entry.""" config_data = hass.data[DOMAIN].get(ATTR_CONFIG) # These will contain the initialized devices lights = hass.data[DOMAIN][CONF_LIGHT] = [] switches = hass.data[DOMAIN][CONF_SWITCH] = [] # Add static devices static_devices = SmartDevices() if config_data is not None: static_devices = get_static_devices(config_data) lights.extend(static_devices.lights) switches.extend(static_devices.switches) # Add discovered devices if config_data is None or config_data[CONF_DISCOVERY]: 
discovered_devices = await async_discover_devices(hass, static_devices) lights.extend(discovered_devices.lights) switches.extend(discovered_devices.switches) forward_setup = hass.config_entries.async_forward_entry_setup if lights: _LOGGER.debug( "Got %s lights: %s", len(lights), ", ".join([d.host for d in lights]) ) hass.async_create_task(forward_setup(config_entry, "light")) if switches: _LOGGER.debug( "Got %s switches: %s", len(switches), ", ".join([d.host for d in switches]) ) hass.async_create_task(forward_setup(config_entry, "switch")) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" forward_unload = hass.config_entries.async_forward_entry_unload remove_lights = remove_switches = False if hass.data[DOMAIN][CONF_LIGHT]: remove_lights = await forward_unload(entry, "light") if hass.data[DOMAIN][CONF_SWITCH]: remove_switches = await forward_unload(entry, "switch") if remove_lights or remove_switches: hass.data[DOMAIN].clear() return True # We were not able to unload the platforms, either because there # were none or one of the forward_unloads failed. return False
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/tplink/__init__.py
"""Network utilities.""" from __future__ import annotations from ipaddress import IPv4Address, IPv6Address, ip_address, ip_network import yarl # RFC6890 - IP addresses of loopback interfaces LOOPBACK_NETWORKS = ( ip_network("127.0.0.0/8"), ip_network("::1/128"), ip_network("::ffff:127.0.0.0/104"), ) # RFC6890 - Address allocation for Private Internets PRIVATE_NETWORKS = ( ip_network("fd00::/8"), ip_network("10.0.0.0/8"), ip_network("172.16.0.0/12"), ip_network("192.168.0.0/16"), ) # RFC6890 - Link local ranges LINK_LOCAL_NETWORK = ip_network("169.254.0.0/16") def is_loopback(address: IPv4Address | IPv6Address) -> bool: """Check if an address is a loopback address.""" return any(address in network for network in LOOPBACK_NETWORKS) def is_private(address: IPv4Address | IPv6Address) -> bool: """Check if an address is a private address.""" return any(address in network for network in PRIVATE_NETWORKS) def is_link_local(address: IPv4Address | IPv6Address) -> bool: """Check if an address is link local.""" return address in LINK_LOCAL_NETWORK def is_local(address: IPv4Address | IPv6Address) -> bool: """Check if an address is loopback or private.""" return is_loopback(address) or is_private(address) def is_invalid(address: IPv4Address | IPv6Address) -> bool: """Check if an address is invalid.""" return bool(address == ip_address("0.0.0.0")) def is_ip_address(address: str) -> bool: """Check if a given string is an IP address.""" try: ip_address(address) except ValueError: return False return True def normalize_url(address: str) -> str: """Normalize a given URL.""" url = yarl.URL(address.rstrip("/")) if url.is_default_port(): return str(url.with_port(None)) return str(url)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/util/network.py
"""Support for the Daikin HVAC.""" import logging import voluptuous as vol from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity from homeassistant.components.climate.const import ( ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF, PRESET_AWAY, PRESET_BOOST, PRESET_ECO, PRESET_NONE, SUPPORT_FAN_MODE, SUPPORT_PRESET_MODE, SUPPORT_SWING_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_HOST, CONF_NAME, TEMP_CELSIUS import homeassistant.helpers.config_validation as cv from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, ATTR_STATE_OFF, ATTR_STATE_ON, ATTR_TARGET_TEMPERATURE, ) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string} ) HA_STATE_TO_DAIKIN = { HVAC_MODE_FAN_ONLY: "fan", HVAC_MODE_DRY: "dry", HVAC_MODE_COOL: "cool", HVAC_MODE_HEAT: "hot", HVAC_MODE_HEAT_COOL: "auto", HVAC_MODE_OFF: "off", } DAIKIN_TO_HA_STATE = { "fan": HVAC_MODE_FAN_ONLY, "dry": HVAC_MODE_DRY, "cool": HVAC_MODE_COOL, "hot": HVAC_MODE_HEAT, "auto": HVAC_MODE_HEAT_COOL, "off": HVAC_MODE_OFF, } HA_PRESET_TO_DAIKIN = { PRESET_AWAY: "on", PRESET_NONE: "off", PRESET_BOOST: "powerful", PRESET_ECO: "econo", } HA_ATTR_TO_DAIKIN = { ATTR_PRESET_MODE: "en_hol", ATTR_HVAC_MODE: "mode", ATTR_FAN_MODE: "f_rate", ATTR_SWING_MODE: "f_dir", ATTR_INSIDE_TEMPERATURE: "htemp", ATTR_OUTSIDE_TEMPERATURE: "otemp", ATTR_TARGET_TEMPERATURE: "stemp", } DAIKIN_ATTR_ADVANCED = "adv" async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up the Daikin HVAC platform. Can only be called when a user accidentally mentions the platform in their config. But even in that case it would have been ignored. 
""" async def async_setup_entry(hass, entry, async_add_entities): """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) async_add_entities([DaikinClimate(daikin_api)], update_before_add=True) class DaikinClimate(ClimateEntity): """Representation of a Daikin HVAC.""" def __init__(self, api): """Initialize the climate device.""" self._api = api self._list = { ATTR_HVAC_MODE: list(HA_STATE_TO_DAIKIN), ATTR_FAN_MODE: self._api.device.fan_rate, ATTR_SWING_MODE: self._api.device.swing_modes, } self._supported_features = SUPPORT_TARGET_TEMPERATURE if ( self._api.device.support_away_mode or self._api.device.support_advanced_modes ): self._supported_features |= SUPPORT_PRESET_MODE if self._api.device.support_fan_rate: self._supported_features |= SUPPORT_FAN_MODE if self._api.device.support_swing_mode: self._supported_features |= SUPPORT_SWING_MODE async def _set(self, settings): """Set device settings using API.""" values = {} for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_HVAC_MODE]: value = settings.get(attr) if value is None: continue daikin_attr = HA_ATTR_TO_DAIKIN.get(attr) if daikin_attr is not None: if attr == ATTR_HVAC_MODE: values[daikin_attr] = HA_STATE_TO_DAIKIN[value] elif value in self._list[attr]: values[daikin_attr] = value.lower() else: _LOGGER.error("Invalid value %s for %s", attr, value) # temperature elif attr == ATTR_TEMPERATURE: try: values[HA_ATTR_TO_DAIKIN[ATTR_TARGET_TEMPERATURE]] = str(int(value)) except ValueError: _LOGGER.error("Invalid temperature %s", value) if values: await self._api.device.set(values) @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the thermostat, if any.""" return self._api.name @property def unique_id(self): """Return a unique ID.""" return self._api.device.mac @property def temperature_unit(self): """Return the unit of measurement 
which this thermostat uses.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self._api.device.inside_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._api.device.target_temperature @property def target_temperature_step(self): """Return the supported step of target temperature.""" return 1 async def async_set_temperature(self, **kwargs): """Set new target temperature.""" await self._set(kwargs) @property def hvac_mode(self): """Return current operation ie. heat, cool, idle.""" daikin_mode = self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1] return DAIKIN_TO_HA_STATE.get(daikin_mode, HVAC_MODE_HEAT_COOL) @property def hvac_modes(self): """Return the list of available operation modes.""" return self._list.get(ATTR_HVAC_MODE) async def async_set_hvac_mode(self, hvac_mode): """Set HVAC mode.""" await self._set({ATTR_HVAC_MODE: hvac_mode}) @property def fan_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE])[1].title() async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" await self._set({ATTR_FAN_MODE: fan_mode}) @property def fan_modes(self): """List of available fan modes.""" return self._list.get(ATTR_FAN_MODE) @property def swing_mode(self): """Return the fan setting.""" return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE])[1].title() async def async_set_swing_mode(self, swing_mode): """Set new target temperature.""" await self._set({ATTR_SWING_MODE: swing_mode}) @property def swing_modes(self): """List of available swing modes.""" return self._list.get(ATTR_SWING_MODE) @property def preset_mode(self): """Return the preset_mode.""" if ( self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_PRESET_MODE])[1] == HA_PRESET_TO_DAIKIN[PRESET_AWAY] ): return PRESET_AWAY if ( HA_PRESET_TO_DAIKIN[PRESET_BOOST] in 
self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_BOOST if ( HA_PRESET_TO_DAIKIN[PRESET_ECO] in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_ECO return PRESET_NONE async def async_set_preset_mode(self, preset_mode): """Set preset mode.""" if preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_ON) elif preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_ON ) elif preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_ON ) else: if self.preset_mode == PRESET_AWAY: await self._api.device.set_holiday(ATTR_STATE_OFF) elif self.preset_mode == PRESET_BOOST: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_OFF ) elif self.preset_mode == PRESET_ECO: await self._api.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_OFF ) @property def preset_modes(self): """List of available preset modes.""" ret = [PRESET_NONE] if self._api.device.support_away_mode: ret.append(PRESET_AWAY) if self._api.device.support_advanced_modes: ret += [PRESET_ECO, PRESET_BOOST] return ret async def async_update(self): """Retrieve latest state.""" await self._api.async_update() async def async_turn_on(self): """Turn device on.""" await self._api.device.set({}) async def async_turn_off(self): """Turn device off.""" await self._api.device.set( {HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVAC_MODE_OFF]} ) @property def device_info(self): """Return a device description for device registry.""" return self._api.device_info
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/daikin/climate.py
"""Describe group states.""" from homeassistant.components.group import GroupIntegrationRegistry from homeassistant.const import STATE_OFF from homeassistant.core import callback from homeassistant.helpers.typing import HomeAssistantType from . import ( STATE_ECO, STATE_ELECTRIC, STATE_GAS, STATE_HEAT_PUMP, STATE_HIGH_DEMAND, STATE_PERFORMANCE, ) @callback def async_describe_on_off_states( hass: HomeAssistantType, registry: GroupIntegrationRegistry ) -> None: """Describe group on off states.""" registry.on_off_states( { STATE_ECO, STATE_ELECTRIC, STATE_PERFORMANCE, STATE_HIGH_DEMAND, STATE_HEAT_PUMP, STATE_GAS, }, STATE_OFF, )
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/water_heater/group.py
"""Describe logbook events.""" from homeassistant.core import callback from .const import DOMAIN, EVENT_ALEXA_SMART_HOME @callback def async_describe_events(hass, async_describe_event): """Describe logbook events.""" @callback def async_describe_logbook_event(event): """Describe a logbook event.""" data = event.data entity_id = data["request"].get("entity_id") if entity_id: state = hass.states.get(entity_id) name = state.name if state else entity_id message = f"send command {data['request']['namespace']}/{data['request']['name']} for {name}" else: message = ( f"send command {data['request']['namespace']}/{data['request']['name']}" ) return {"name": "Amazon Alexa", "message": message, "entity_id": entity_id} async_describe_event(DOMAIN, EVENT_ALEXA_SMART_HOME, async_describe_logbook_event)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert 
result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: 
False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert 
result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/alexa/logbook.py
import arrow from datetime import datetime import pendulum import re def human_to_dt(ts): t = arrow.utcnow() if ts == 'now': return t if ts == 'hour': return t.replace(minute=0, second=0, microsecond=0) if ts == 'day': return t.replace(hour=0, minute=0, second=0, microsecond=0) if ts == 'month': return t.replace(day=1, hour=0, minute=0, second=0, microsecond=0) def parse_timestamp(ts): if isinstance(ts, arrow.Arrow): return ts t = human_to_dt(ts) if t: return t try: t = arrow.get(ts) if t.year < 1980: if type(ts) == datetime: ts = str(ts) if len(ts) == 8: ts = '{}T00:00:00Z'.format(ts) t = arrow.get(ts, 'YYYYMMDDTHH:mm:ss') if t.year < 1970: raise RuntimeError('invalid timestamp: %s' % ts) return t except ValueError as e: if len(ts) == 14: match = re.search('^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})$', ts) if match: ts = '{}-{}-{}T{}:{}:{}Z'.format(match.group(1), match.group(2), match.group(3), match.group(4), match.group(5), match.group(6)) t = arrow.get(ts, 'YYYY-MM-DDTHH:mm:ss') return t else: raise RuntimeError('Invalid Timestamp: %s' % ts) if len(ts) == 16: # 20160219T224322Z match = re.search('^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})Z$', ts) if match: ts = '{}-{}-{}T{}:{}:{}Z'.format(match.group(1), match.group(2), match.group(3), match.group(4), match.group(5), match.group(6)) t = arrow.get(ts, 'YYYY-MM-DDTHH:mm:ss') return t else: raise RuntimeError('Invalid Timestamp: %s' % ts) else: raise RuntimeError('Invalid Timestamp: %s' % ts) except arrow.parser.ParserError as e: t = pendulum.parse(ts, strict=False) t = arrow.get(t) return t else: raise RuntimeError('Invalid Timestamp: %s' % ts)
# -*- coding: utf-8 -*- from csirtg_indicator.format.zjson import Json, get_lines import pytest from csirtg_indicator import Indicator @pytest.fixture def indicator(): i = { 'indicator': "example.com", 'itype': 'fqdn', 'provider': "me.com", 'tlp': "amber", 'confidence': "85", 'reporttime': '2015-01-01T00:00:00Z' } return Indicator(**i) @pytest.fixture def indicator_unicode(indicator): indicator.indicator = 'http://xz.job391.com/down/����࿪��@89_1_60' return indicator def test_format_json(indicator): data = [indicator, indicator] print(Json(data)) assert Json(data) def test_format_json2(indicator): data = [indicator, indicator] n = list(get_lines(data)) assert len(n) > 0 if __name__ == '__main__': test_format_json()
csirtgadgets/csirtg-indicator-py
test/format/test_json.py
csirtg_indicator/utils/ztime.py
#! /usr/bin/env python # -*- coding: utf-8 -*- import sys # Local module from .structure import Chain, Atom from .PDB import PDB # load MDAnalysis with limited support for Python 3 import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore") import MDAnalysis # Create the __all__ keyword according to the conditional import __all__ = ['chains_from_files', 'chains_from_trajectory'] def chains_from_files(path_list): for pdb_name in path_list: pdb = PDB(pdb_name) for chain in pdb.get_chains(): # build comment comment = pdb_name if chain.model: comment += " | model %s" % (chain.model) if chain.name: comment += " | chain %s" % (chain.name) yield comment, chain print("Read {0} chain(s) in {1}".format(pdb.nb_chains, pdb_name), file=sys.stderr) def chains_from_trajectory(trajectory, topology): universe = MDAnalysis.Universe(topology, trajectory) selection = universe.select_atoms("backbone") #Initialize structure with the selection structure = Chain() for atm in selection: atom = Atom.read_from_xtc(atm) # append structure with atom structure.add_atom(atom) nb_frames = len(universe.trajectory) # Print the first frame print("Frame {}/{}.".format(1, nb_frames), file=sys.stderr) for ts in universe.trajectory: #Update only with new coordinates structure.set_coordinates(selection.positions) # define structure comment comment = "%s | frame %s" % (trajectory, ts.frame) yield comment, structure # Progress bar # Print one frame every 100. if ((ts.frame + 1) % 100 == 0): print("Frame {}/{}.".format(ts.frame + 1, nb_frames), file=sys.stderr) # Print the last frame print("Frame {}/{}.".format(nb_frames, nb_frames), file=sys.stderr)
#! /usr/bin/env python # -*- coding: utf-8 -*- """ Unit tests for PBxplore. Tests functions from different programs. 2014 - P. Poulain """ # ============================================================================= # Modules # ============================================================================= import collections import os import numpy import pytest import pbxplore as pbx from pbxplore.structure import structure import MDAnalysis here = os.path.abspath(os.path.dirname(__file__)) # ============================================================================= # Classes for tests # ============================================================================= Result = collections.namedtuple('Result', ['A', 'B', 'C', 'D', 'torsion']) class TestStructurelib(object): """ Tests for Structurelib """ @pytest.mark.parametrize('result', (Result((-7.28, -9.262, 5.077), (-7.526, -10.643, 5.529), (-6.221, -11.438, 5.555), (-6.289, -12.685, 5.931), -179.663656153), Result((-1.373, -8.817, -4.389), (-1.203, -8.335, -5.792), (-1.891, -6.977, -5.927), (-1.918, -6.429, -7.107), -176.048770127), Result((-0.533, -8.42, -3.47 ), (-1.373, -8.817, -4.389), (-1.203, -8.335, -5.792), (-1.891, -6.977, -5.927), -84.8356057692), Result((-1.918, -6.429, -7.107), (-2.609, -5.125, -7.305), (-4.108, -5.392, -7.331), (-4.469, -6.494, -7.911), -36.8942888266), Result((-11.285, 6.472, -7.44 ), (-12.62, 5.829, -7.425 ), (-13.585, 6.626, -6.544), (-13.098, 7.621, -5.858), -6.58786169376), Result((-11.284, -0.971, -2.679), (-12.65, -0.794, -3.226), (-13.665, -1.664, -2.479), (-13.262, -2.363, -1.452), 3.91626706556), Result((-2.004, -10.892, -2.611), (-1.87, -9.835, -1.853), (-0.726, -8.877, -2.011), (-0.533, -8.42, -3.47), 50.065196067), Result((11.174, -6.725, 0.458), (10.732, -7.258, -0.86), (9.27, -6.869, -1.096), (8.741, -7.185, -2.245), 175.872397707)) ) def test_get_dihedral(self, result): """ Test for get_dihedral() """ torsion = structure.get_dihedral(result.A, result.B, result.C, 
result.D) assert torsion == pytest.approx(result.torsion) def test_loader_PDB(self): """ Test for API loader function on PDBs """ filename = os.path.join(here, "test_data/2LFU.pdb") comment, chain = list(pbx.chains_from_files([filename]))[0] ref_comment = "{0} | model 1 | chain A".format(filename) ref_chain = "Chain A / model 1: 2372 atoms" assert ref_comment == comment assert ref_chain == format(chain) def test_loader_xtc(self): """ Test for API load function on xtc files """ topol = os.path.join(here, "test_data/barstar_md_traj.gro") traj = os.path.join(here, "test_data/barstar_md_traj.xtc") chains = list(pbx.chains_from_trajectory(traj, topol)) comment, chain = chains[0] ref_comment = "{0} | frame 0".format(traj) ref_chain = "Chain / model : 355 atoms" assert ref_comment == comment assert ref_chain == format(chain) comment, chain = chains[-1] ref_comment = "{0} | frame 9".format(traj) assert ref_comment == comment assert ref_chain == format(chain) class TestAtomClass(object): """ Tests for the Atom class in PDBlib """ PDBx_fields = ['group_PDB', 'id', 'type_symbol', 'label_atom_id', 'label_alt_id', 'label_comp_id', 'label_asym_id', 'label_entity_id', 'label_seq_id', 'pdbx_PDB_ins_code', 'Cartn_x', 'Cartn_y', 'Cartn_z', 'occupancy', 'B_iso_or_equiv', 'Cartn_x_esd', 'Cartn_y_esd', 'Cartn_z_esd', 'occupancy_esd', 'B_iso_or_equiv_esd', 'pdbx_formal_charge', 'auth_seq_id', 'auth_comp_id', 'auth_asym_id', 'auth_atom_id', 'pdbx_PDB_model_num'] @pytest.mark.parametrize( 'line, expected', (("ATOM 512 N GLU A 32 -1.870 -9.835 -1.853 1.00 0.56 N ", [-1.87, -9.835, -1.853]), ("ATOM 1424 CA SER A 89 7.604 11.308 1.435 1.00 0.62 C ", [7.604, 11.308, 1.435]), ("ATOM 1167 CG2 VAL B 50 9.294 44.541 -4.830 1.00 27.62 C ", [9.294, 44.541, -4.83])) ) def test_read_from_PDB(self, line, expected): """ Tests for read_from_PDB() """ atom = structure.Atom.read_from_PDB(line) assert atom.coords == pytest.approx(expected) def test_read_PDB_line_short(self): """ Test when PDB line is too 
short """ with pytest.raises(structure.AtomError): structure.Atom.read_from_PDB("ATOM 512 N GLU A 32 -1.870 -9.835") @pytest.mark.parametrize( 'line,coordinates', (("ATOM 4769 H HB . ILE A 1 35 ? -20.422 5.104 -0.207 1.00 0.00 ? ? ? ? ? ? 277 ILE A HB 3", [-20.422, 5.104, -0.207]), ("ATOM 18201 H HG21 . THR A 1 140 ? 11.080 -12.466 -8.977 1.00 0.00 ? ? ? ? ? ? 382 THR A HG21 8", [11.08, -12.466, -8.977]), ("ATOM 23720 H HE2 . HIS A 1 193 ? 13.974 24.297 0.352 1.00 0.00 ? ? ? ? ? ? 435 HIS A HE2 10", [13.974, 24.297, 0.352])) ) def test_read_from_PDBx(self, line, coordinates): """ Tests for read_from_PDBx() """ atom = structure.Atom.read_from_PDBx(line, self.PDBx_fields) assert atom.coords == pytest.approx(coordinates) @pytest.mark.parametrize( 'line', ("ATOM 4769 H HB . ILE A 1 35 ? -20.422 5.104", "ATOM 4769 H HB . ILE A 1 XXX ? -20.422 5.104 -0.207 1.00 0.00 ? ? ? ? ? ? 277 ILE A HB 3",) ) def test_read_PDBx_failed_line(self, line): """ Test when PDBx line is not correctly formated """ with pytest.raises(structure.AtomError): structure.Atom.read_from_PDBx(line, self.PDBx_fields) def test_read_from_xtc(self): """ Tests for read_from_xtc() """ topology = os.path.join(here, "test_data/barstar_md_traj.gro") traj = os.path.join(here, "test_data/barstar_md_traj.xtc") universe = MDAnalysis.Universe(topology, traj) selection = universe.select_atoms("backbone") # First timeframe atom = structure.Atom.read_from_xtc(selection[0]) assert atom.resid == 1 assert atom.name == "N" for a, b in zip(atom.coords, [21.68, 33.87, 36.18]): assert a == pytest.approx(b, abs=1e-3) atom = structure.Atom.read_from_xtc(selection[-1]) assert atom.resid == 89 assert atom.name == "C" for a, b in zip(atom.coords, [40.14, 38.75, 28.42]): assert a == pytest.approx(b, abs=1e-3) #Last one ts = universe.trajectory[-1] atom = structure.Atom.read_from_xtc(selection[0]) for a, b in zip(atom.coords, [20.63, 38.43, 32.09]): assert a == pytest.approx(b, abs=1e-3) atom = 
structure.Atom.read_from_xtc(selection[-1]) for a, b in zip(atom.coords, [39.14, 39.77, 25.60]): assert a == pytest.approx(b, abs=1e-3) class TestChainClass(object): """ Tests for Chain class in PDBlib """ @staticmethod @pytest.fixture def chain(): """ Run before each test. Create a chain object """ lines = ("ATOM 840 C ARG B 11 22.955 23.561 -4.012 1.00 28.07 C ", "ATOM 849 N SER B 12 22.623 24.218 -2.883 1.00 24.77 N ", "ATOM 850 CA SER B 12 22.385 23.396 -1.637 1.00 21.99 C ", "ATOM 851 C SER B 12 21.150 24.066 -0.947 1.00 32.67 C ", "ATOM 855 N ILE B 13 20.421 23.341 -0.088 1.00 30.25 N ") chain = structure.Chain() for line in lines: atom = structure.Atom.read_from_PDB(line) chain.add_atom(atom) return chain def test_size(self, chain): """ Tests for size() """ assert chain.size() == 5 @pytest.mark.parametrize( 'resid,angles', ((11, {'phi': None, 'psi': None}), (12, {'phi': -139.77684605036447, 'psi': 157.94348570201197}), (13, {'phi': None, 'psi': None})) ) def test_get_phi_psi_angles(self, chain, resid, angles): """ Tests for get_phi_psi_angles() """ phi_psi = chain.get_phi_psi_angles() assert ( (angles["phi"] is None and phi_psi[resid]["phi"] is None) or angles["phi"] == pytest.approx(phi_psi[resid]["phi"]) ) def test_set_coordinates(self, chain): """ Tests for coordinates update """ new_coords = numpy.array([[1.00, 1.00, 1.00], [2.00, 2.00, 2.00], [3.00, 3.00, 3.00], [4.00, 4.00, 4.00], [5.00, 5.00, 5.00]]) chain.set_coordinates(new_coords) for atom, ref_coords in zip(chain, new_coords): numpy.testing.assert_array_almost_equal(atom.coords, ref_coords) # Wrong shape wrong_coords = new_coords[:-1] with pytest.raises(ValueError): chain.set_coordinates(wrong_coords) class TestPDBClass(object): """ Tests for PDB class in Structurelib """ @staticmethod @pytest.fixture def chains_1BTA(): filename = os.path.join(here, "test_data/1BTA.pdb") pdb = pbx.structure.PDB.PDB(filename) return list(pdb.get_chains()) @staticmethod @pytest.fixture def chains_1AY7_pdb(): 
filename = os.path.join(here, "test_data/1AY7.pdb") pdb = pbx.structure.PDB.PDB(filename) return list(pdb.get_chains()) @staticmethod @pytest.fixture def chains_1AY7_pdbx(): filename = os.path.join(here, "test_data/1AY7.cif.gz") pdb = pbx.structure.PDB.PDB(filename) return list(pdb.get_chains()) @staticmethod @pytest.fixture def chains_2LFU(): filename = os.path.join(here, "test_data/2LFU.pdb") pdb = pbx.structure.PDB.PDB(filename) return list(pdb.get_chains()) @pytest.mark.parametrize( 'index,ref', ((0, "ATOM 1 N LYS A 1 -8.655 5.770 8.371 0.00 0.00 "), (-1, "ATOM 1434 HG SER A 89 6.663 12.440 4.229 0.00 0.00 ")) ) def test_read_single_PDB(self, chains_1BTA, index, ref): """ Tests for single chain in one PDB """ chain = chains_1BTA[0] assert chain[index].format() == ref @pytest.mark.parametrize( 'chain_idx,ref_first,ref_last', ((0, "ATOM 1 N ASP A 1 11.860 13.207 12.724 0.00 0.00 ", "ATOM 751 OXT CYS A 96 9.922 16.291 36.110 0.00 0.00 "), (1, "ATOM 753 N LYS B 1 11.318 46.585 0.493 0.00 0.00 ", "ATOM 1489 OXT SER B 89 13.857 33.192 -16.133 0.00 0.00 ")) ) def test_read_multiple_PDB(self, chains_1AY7_pdb, chain_idx, ref_first, ref_last): """ Tests for multiple chains in one file. This test is called for both PDB abd PDBx format because the `chains_1AY7` fixture is parametrixed for both extensions. """ chain = chains_1AY7_pdb[chain_idx] assert chain[0].format() == ref_first assert chain[-1].format() == ref_last def test_read_models_PDB(self, chains_2LFU): """ Tests for multiple models in one PDB """ #3 models of one chain assert len(chains_2LFU) == 3 model_3 = chains_2LFU[2] ref = "ATOM 1 N ASN A 276 -21.874 9.349 4.010 0.00 0.00 " assert model_3[0].format() == ref assert model_3.model == "3" # This test could be factorized with `test_read_multiple_PDB` by # parametrizing the `chains_1AY7_*` fixture. Yet, the atom number of the # second chain are shifted by one between the PDB and the PDBx file. 
This # is due to the TER record counting in the sequence of atom numbers in the # PDB file. @pytest.mark.parametrize( 'chain_idx,ref_first,ref_last', ((0, "ATOM 1 N ASP A 1 11.860 13.207 12.724 0.00 0.00 ", "ATOM 751 OXT CYS A 96 9.922 16.291 36.110 0.00 0.00 "), (1, "ATOM 752 N LYS B 1 11.318 46.585 0.493 0.00 0.00 ", "ATOM 1488 OXT SER B 89 13.857 33.192 -16.133 0.00 0.00 ")) ) def test_read_multiple_PDBx(self, chains_1AY7_pdbx, chain_idx, ref_first, ref_last): """ Tests for multiple chains in one file. This test is called for both PDB abd PDBx format because the `chains_1AY7` fixture is parametrixed for both extensions. """ chain = chains_1AY7_pdbx[chain_idx] assert chain[0].format() == ref_first assert chain[-1].format() == ref_last class TestIolib(object): """ Tests for Iolib """ def test_read_fasta(self): """ Test for parsing mulitple fastas """ filename = os.path.join(here, "test_data/1AY7.pdb.PB.fasta") headers, sequences = pbx.io.read_fasta(filename) assert headers == ['test_data/1AY7.pdb | chain A', 'test_data/1AY7.pdb | chain B'] assert sequences == ['ZZbjadfklmcfklmmmmmmmmnnpaafbfkgo' 'pacehlnomaccddehjaccdddddehklpnbja' 'dcdddfbehiacddfegolaccdddfkZZ', 'ZZcddfklpcbfklmmmmmmmmnopafklgoiakl' 'mmmmmmmmpacddddddehkllmmmmnnommmmmm' 'mmmmmmmmnopacddddZZ']
pierrepo/PBxplore
pbxplore/tests/test_functions.py
pbxplore/structure/loader.py
"""Support for Goal Zero Yeti Sensors.""" from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.const import CONF_NAME from . import YetiEntity from .const import BINARY_SENSOR_DICT, DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN PARALLEL_UPDATES = 0 async def async_setup_entry(hass, entry, async_add_entities): """Set up the Goal Zero Yeti sensor.""" name = entry.data[CONF_NAME] goalzero_data = hass.data[DOMAIN][entry.entry_id] sensors = [ YetiBinarySensor( goalzero_data[DATA_KEY_API], goalzero_data[DATA_KEY_COORDINATOR], name, sensor_name, entry.entry_id, ) for sensor_name in BINARY_SENSOR_DICT ] async_add_entities(sensors, True) class YetiBinarySensor(YetiEntity, BinarySensorEntity): """Representation of a Goal Zero Yeti sensor.""" def __init__(self, api, coordinator, name, sensor_name, server_unique_id): """Initialize a Goal Zero Yeti sensor.""" super().__init__(api, coordinator, name, server_unique_id) self._condition = sensor_name variable_info = BINARY_SENSOR_DICT[sensor_name] self._condition_name = variable_info[0] self._icon = variable_info[2] self._device_class = variable_info[1] @property def name(self): """Return the name of the sensor.""" return f"{self._name} {self._condition_name}" @property def unique_id(self): """Return the unique id of the sensor.""" return f"{self._server_unique_id}/{self._condition_name}" @property def is_on(self): """Return if the service is on.""" if self.api.data: return self.api.data[self._condition] == 1 return False @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/goalzero/binary_sensor.py
"""Describe group states.""" from homeassistant.components.group import GroupIntegrationRegistry from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import callback from homeassistant.helpers.typing import HomeAssistantType @callback def async_describe_on_off_states( hass: HomeAssistantType, registry: GroupIntegrationRegistry ) -> None: """Describe group on off states.""" registry.on_off_states({STATE_LOCKED}, STATE_UNLOCKED)
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/lock/group.py
"""Home Assistant representation of an UPnP/IGD.""" import asyncio from ipaddress import IPv4Address from typing import List, Mapping from async_upnp_client import UpnpFactory from async_upnp_client.aiohttp import AiohttpSessionRequester from async_upnp_client.profiles.igd import IgdDevice from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from .const import ( BYTES_RECEIVED, BYTES_SENT, CONF_LOCAL_IP, DISCOVERY_LOCATION, DISCOVERY_ST, DISCOVERY_UDN, DISCOVERY_USN, DOMAIN, DOMAIN_CONFIG, LOGGER as _LOGGER, PACKETS_RECEIVED, PACKETS_SENT, TIMESTAMP, ) class Device: """Home Assistant representation of an UPnP/IGD.""" def __init__(self, igd_device): """Initialize UPnP/IGD device.""" self._igd_device: IgdDevice = igd_device self._mapped_ports = [] @classmethod async def async_discover(cls, hass: HomeAssistantType) -> List[Mapping]: """Discover UPnP/IGD devices.""" _LOGGER.debug("Discovering UPnP/IGD devices") local_ip = None if DOMAIN in hass.data and DOMAIN_CONFIG in hass.data[DOMAIN]: local_ip = hass.data[DOMAIN][DOMAIN_CONFIG].get(CONF_LOCAL_IP) if local_ip: local_ip = IPv4Address(local_ip) discovery_infos = await IgdDevice.async_search(source_ip=local_ip, timeout=10) # add extra info and store devices devices = [] for discovery_info in discovery_infos: discovery_info[DISCOVERY_UDN] = discovery_info["_udn"] discovery_info[DISCOVERY_ST] = discovery_info["st"] discovery_info[DISCOVERY_LOCATION] = discovery_info["location"] usn = f"{discovery_info[DISCOVERY_UDN]}::{discovery_info[DISCOVERY_ST]}" discovery_info[DISCOVERY_USN] = usn _LOGGER.debug("Discovered device: %s", discovery_info) devices.append(discovery_info) return devices @classmethod async def async_create_device(cls, hass: HomeAssistantType, ssdp_location: str): """Create UPnP/IGD device.""" # build async_upnp_client requester session = async_get_clientsession(hass) requester = 
AiohttpSessionRequester(session, True, 10) # create async_upnp_client device factory = UpnpFactory(requester, disable_state_variable_validation=True) upnp_device = await factory.async_create_device(ssdp_location) igd_device = IgdDevice(upnp_device, None) return cls(igd_device) @property def udn(self) -> str: """Get the UDN.""" return self._igd_device.udn @property def name(self) -> str: """Get the name.""" return self._igd_device.name @property def manufacturer(self) -> str: """Get the manufacturer.""" return self._igd_device.manufacturer @property def model_name(self) -> str: """Get the model name.""" return self._igd_device.model_name @property def device_type(self) -> str: """Get the device type.""" return self._igd_device.device_type @property def unique_id(self) -> str: """Get the unique id.""" return f"{self.udn}::{self.device_type}" def __str__(self) -> str: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}" async def async_get_traffic_data(self) -> Mapping[str, any]: """ Get all traffic data in one go. Traffic data consists of: - total bytes sent - total bytes received - total packets sent - total packats received Data is timestamped. """ _LOGGER.debug("Getting traffic statistics from device: %s", self) values = await asyncio.gather( self._igd_device.async_get_total_bytes_received(), self._igd_device.async_get_total_bytes_sent(), self._igd_device.async_get_total_packets_received(), self._igd_device.async_get_total_packets_sent(), ) return { TIMESTAMP: dt_util.utcnow(), BYTES_RECEIVED: values[0], BYTES_SENT: values[1], PACKETS_RECEIVED: values[2], PACKETS_SENT: values[3], }
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/upnp/device.py
"""Config flow for UniFi.""" import socket import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from .const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, CONF_CONTROLLER, CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_POE_CLIENTS, CONF_SITE_ID, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, CONF_TRACK_WIRED_CLIENTS, CONTROLLER_ID, DEFAULT_POE_CLIENTS, DOMAIN as UNIFI_DOMAIN, LOGGER, ) from .controller import get_controller from .errors import AlreadyConfigured, AuthenticationRequired, CannotConnect DEFAULT_PORT = 8443 DEFAULT_SITE_ID = "default" DEFAULT_VERIFY_SSL = False @callback def get_controller_id_from_config_entry(config_entry): """Return controller with a matching bridge id.""" return CONTROLLER_ID.format( host=config_entry.data[CONF_CONTROLLER][CONF_HOST], site=config_entry.data[CONF_CONTROLLER][CONF_SITE_ID], ) class UnifiFlowHandler(config_entries.ConfigFlow, domain=UNIFI_DOMAIN): """Handle a UniFi config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) def __init__(self): """Initialize the UniFi flow.""" self.config = None self.desc = None self.sites = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input is not None: try: self.config = { CONF_HOST: user_input[CONF_HOST], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input.get(CONF_PORT), CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL), CONF_SITE_ID: DEFAULT_SITE_ID, } controller = await get_controller(self.hass, **self.config) self.sites = 
await controller.sites() return await self.async_step_site() except AuthenticationRequired: errors["base"] = "faulty_credentials" except CannotConnect: errors["base"] = "service_unavailable" except Exception: # pylint: disable=broad-except LOGGER.error( "Unknown error connecting with UniFi Controller at %s", user_input[CONF_HOST], ) return self.async_abort(reason="unknown") host = "" if await async_discover_unifi(self.hass): host = "unifi" return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=host): str, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool, } ), errors=errors, ) async def async_step_site(self, user_input=None): """Select site to control.""" errors = {} if user_input is not None: try: desc = user_input.get(CONF_SITE_ID, self.desc) for site in self.sites.values(): if desc == site["desc"]: self.config[CONF_SITE_ID] = site["name"] break for entry in self._async_current_entries(): controller = entry.data[CONF_CONTROLLER] if ( controller[CONF_HOST] == self.config[CONF_HOST] and controller[CONF_SITE_ID] == self.config[CONF_SITE_ID] ): raise AlreadyConfigured data = {CONF_CONTROLLER: self.config} return self.async_create_entry(title=desc, data=data) except AlreadyConfigured: return self.async_abort(reason="already_configured") if len(self.sites) == 1: self.desc = next(iter(self.sites.values()))["desc"] return await self.async_step_site(user_input={}) sites = [] for site in self.sites.values(): sites.append(site["desc"]) return self.async_show_form( step_id="site", data_schema=vol.Schema({vol.Required(CONF_SITE_ID): vol.In(sites)}), errors=errors, ) class UnifiOptionsFlowHandler(config_entries.OptionsFlow): """Handle Unifi options.""" def __init__(self, config_entry): """Initialize UniFi options flow.""" self.config_entry = config_entry self.options = dict(config_entry.options) 
self.controller = None async def async_step_init(self, user_input=None): """Manage the UniFi options.""" self.controller = self.hass.data[UNIFI_DOMAIN][self.config_entry.entry_id] self.options[CONF_BLOCK_CLIENT] = self.controller.option_block_clients if self.show_advanced_options: return await self.async_step_device_tracker() return await self.async_step_simple_options() async def async_step_simple_options(self, user_input=None): """For simple Jack.""" if user_input is not None: self.options.update(user_input) return await self._update_options() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="simple_options", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), } ), ) async def async_step_device_tracker(self, user_input=None): """Manage the device tracker options.""" if user_input is not None: self.options.update(user_input) return await self.async_step_client_control() ssids = ( set(self.controller.api.wlans) | { f"{wlan.name}{wlan.name_combine_suffix}" for wlan in self.controller.api.wlans.values() if not wlan.name_combine_enabled } | { wlan["name"] for ap in self.controller.api.devices.values() for wlan in ap.wlan_overrides if "name" in wlan } ) ssid_filter = {ssid: ssid for ssid in sorted(list(ssids))} return self.async_show_form( step_id="device_tracker", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_WIRED_CLIENTS, default=self.controller.option_track_wired_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, 
vol.Optional( CONF_SSID_FILTER, default=self.controller.option_ssid_filter ): cv.multi_select(ssid_filter), vol.Optional( CONF_DETECTION_TIME, default=int( self.controller.option_detection_time.total_seconds() ), ): int, vol.Optional( CONF_IGNORE_WIRED_BUG, default=self.controller.option_ignore_wired_bug, ): bool, } ), ) async def async_step_client_control(self, user_input=None): """Manage configuration of network access controlled clients.""" errors = {} if user_input is not None: self.options.update(user_input) return await self.async_step_statistics_sensors() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="client_control", data_schema=vol.Schema( { vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), vol.Optional( CONF_POE_CLIENTS, default=self.options.get(CONF_POE_CLIENTS, DEFAULT_POE_CLIENTS), ): bool, } ), errors=errors, ) async def async_step_statistics_sensors(self, user_input=None): """Manage the statistics sensors options.""" if user_input is not None: self.options.update(user_input) return await self._update_options() return self.async_show_form( step_id="statistics_sensors", data_schema=vol.Schema( { vol.Optional( CONF_ALLOW_BANDWIDTH_SENSORS, default=self.controller.option_allow_bandwidth_sensors, ): bool, vol.Optional( CONF_ALLOW_UPTIME_SENSORS, default=self.controller.option_allow_uptime_sensors, ): bool, } ), ) async def _update_options(self): """Update config entry options.""" return self.async_create_entry(title="", data=self.options) async def async_discover_unifi(hass): """Discover UniFi address.""" try: return await hass.async_add_executor_job(socket.gethostbyname, "unifi") except socket.gaierror: return None
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/unifi/config_flow.py
"""Config flow for Logitech Squeezebox integration.""" import asyncio import logging from pysqueezebox import Server, async_discover import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, HTTP_UNAUTHORIZED, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession # pylint: disable=unused-import from .const import DEFAULT_PORT, DOMAIN _LOGGER = logging.getLogger(__name__) TIMEOUT = 5 def _base_schema(discovery_info=None): """Generate base schema.""" base_schema = {} if discovery_info and CONF_HOST in discovery_info: base_schema.update( { vol.Required( CONF_HOST, description={"suggested_value": discovery_info[CONF_HOST]}, ): str, } ) else: base_schema.update({vol.Required(CONF_HOST): str}) if discovery_info and CONF_PORT in discovery_info: base_schema.update( { vol.Required( CONF_PORT, default=DEFAULT_PORT, description={"suggested_value": discovery_info[CONF_PORT]}, ): int, } ) else: base_schema.update({vol.Required(CONF_PORT, default=DEFAULT_PORT): int}) base_schema.update( {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} ) return vol.Schema(base_schema) class SqueezeboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Logitech Squeezebox.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize an instance of the squeezebox config flow.""" self.data_schema = _base_schema() self.discovery_info = None async def _discover(self, uuid=None): """Discover an unconfigured LMS server.""" self.discovery_info = None discovery_event = asyncio.Event() def _discovery_callback(server): if server.uuid: # ignore already configured uuids for entry in self._async_current_entries(): if entry.unique_id == server.uuid: return self.discovery_info = { CONF_HOST: server.host, CONF_PORT: server.port, "uuid": server.uuid, } _LOGGER.debug("Discovered server: %s", 
self.discovery_info) discovery_event.set() discovery_task = self.hass.async_create_task( async_discover(_discovery_callback) ) await discovery_event.wait() discovery_task.cancel() # stop searching as soon as we find server # update with suggested values from discovery self.data_schema = _base_schema(self.discovery_info) async def _validate_input(self, data): """ Validate the user input allows us to connect. Retrieve unique id and abort if already configured. """ server = Server( async_get_clientsession(self.hass), data[CONF_HOST], data[CONF_PORT], data.get(CONF_USERNAME), data.get(CONF_PASSWORD), ) try: status = await server.async_query("serverstatus") if not status: if server.http_status == HTTP_UNAUTHORIZED: return "invalid_auth" return "cannot_connect" except Exception: # pylint: disable=broad-except return "unknown" if "uuid" in status: await self.async_set_unique_id(status["uuid"]) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input and CONF_HOST in user_input: # update with host provided by user self.data_schema = _base_schema(user_input) return await self.async_step_edit() # no host specified, see if we can discover an unconfigured LMS server try: await asyncio.wait_for(self._discover(), timeout=TIMEOUT) return await self.async_step_edit() except asyncio.TimeoutError: errors["base"] = "no_server_found" # display the form return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Optional(CONF_HOST): str}), errors=errors, ) async def async_step_edit(self, user_input=None): """Edit a discovered or manually inputted server.""" errors = {} if user_input: error = await self._validate_input(user_input) if not error: return self.async_create_entry( title=user_input[CONF_HOST], data=user_input ) errors["base"] = error return self.async_show_form( step_id="edit", data_schema=self.data_schema, errors=errors ) async def async_step_import(self, 
config): """Import a config flow from configuration.""" error = await self._validate_input(config) if error: return self.async_abort(reason=error) return self.async_create_entry(title=config[CONF_HOST], data=config) async def async_step_discovery(self, discovery_info): """Handle discovery.""" _LOGGER.debug("Reached discovery flow with info: %s", discovery_info) if "uuid" in discovery_info: await self.async_set_unique_id(discovery_info.pop("uuid")) self._abort_if_unique_id_configured() else: # attempt to connect to server and determine uuid. will fail if password required error = await self._validate_input(discovery_info) if error: await self._async_handle_discovery_without_unique_id() # update schema with suggested values from discovery self.data_schema = _base_schema(discovery_info) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context.update({"title_placeholders": {"host": discovery_info[CONF_HOST]}}) return await self.async_step_edit()
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/squeezebox/config_flow.py
"""Support for PoolSense binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_PROBLEM, BinarySensorEntity, ) from homeassistant.const import CONF_EMAIL from . import PoolSenseEntity from .const import DOMAIN BINARY_SENSORS = { "pH Status": { "unit": None, "icon": None, "name": "pH Status", "device_class": DEVICE_CLASS_PROBLEM, }, "Chlorine Status": { "unit": None, "icon": None, "name": "Chlorine Status", "device_class": DEVICE_CLASS_PROBLEM, }, } async def async_setup_entry(hass, config_entry, async_add_entities): """Defer sensor setup to the shared sensor module.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] binary_sensors_list = [] for binary_sensor in BINARY_SENSORS: binary_sensors_list.append( PoolSenseBinarySensor( coordinator, config_entry.data[CONF_EMAIL], binary_sensor ) ) async_add_entities(binary_sensors_list, False) class PoolSenseBinarySensor(PoolSenseEntity, BinarySensorEntity): """Representation of PoolSense binary sensors.""" @property def is_on(self): """Return true if the binary sensor is on.""" return self.coordinator.data[self.info_type] == "red" @property def icon(self): """Return the icon.""" return BINARY_SENSORS[self.info_type]["icon"] @property def device_class(self): """Return the class of this device.""" return BINARY_SENSORS[self.info_type]["device_class"] @property def name(self): """Return the name of the binary sensor.""" return f"PoolSense {BINARY_SENSORS[self.info_type]['name']}"
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/poolsense/binary_sensor.py
"""Support for EnOcean switches.""" import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import CONF_ID, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity from .device import EnOceanEntity CONF_CHANNEL = "channel" DEFAULT_NAME = "EnOcean Switch" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_CHANNEL, default=0): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the EnOcean switch platform.""" channel = config.get(CONF_CHANNEL) dev_id = config.get(CONF_ID) dev_name = config.get(CONF_NAME) add_entities([EnOceanSwitch(dev_id, dev_name, channel)]) class EnOceanSwitch(EnOceanEntity, ToggleEntity): """Representation of an EnOcean switch device.""" def __init__(self, dev_id, dev_name, channel): """Initialize the EnOcean switch device.""" super().__init__(dev_id, dev_name) self._light = None self._on_state = False self._on_state2 = False self.channel = channel @property def is_on(self): """Return whether the switch is on or off.""" return self._on_state @property def name(self): """Return the device name.""" return self.dev_name def turn_on(self, **kwargs): """Turn on the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = True def turn_off(self, **kwargs): """Turn off the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = False def value_changed(self, packet): """Update the internal state of the 
switch.""" if packet.data[0] == 0xA5: # power meter telegram, turn on if > 10 watts packet.parse_eep(0x12, 0x01) if packet.parsed["DT"]["raw_value"] == 1: raw_val = packet.parsed["MR"]["raw_value"] divisor = packet.parsed["DIV"]["raw_value"] watts = raw_val / (10 ** divisor) if watts > 1: self._on_state = True self.schedule_update_ha_state() elif packet.data[0] == 0xD2: # actuator status telegram packet.parse_eep(0x01, 0x01) if packet.parsed["CMD"]["raw_value"] == 4: channel = packet.parsed["IO"]["raw_value"] output = packet.parsed["OV"]["raw_value"] if channel == self.channel: self._on_state = output > 0 self.schedule_update_ha_state()
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/enocean/switch.py
"""Support for Agent.""" import asyncio from agent import AgentError from agent.a import Agent from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECTION, DOMAIN as AGENT_DOMAIN, SERVER_URL ATTRIBUTION = "ispyconnect.com" DEFAULT_BRAND = "Agent DVR by ispyconnect.com" FORWARDS = ["alarm_control_panel", "camera"] async def async_setup(hass, config): """Old way to set up integrations.""" return True async def async_setup_entry(hass, config_entry): """Set up the Agent component.""" hass.data.setdefault(AGENT_DOMAIN, {}) server_origin = config_entry.data[SERVER_URL] agent_client = Agent(server_origin, async_get_clientsession(hass)) try: await agent_client.update() except AgentError as err: await agent_client.close() raise ConfigEntryNotReady from err if not agent_client.is_available: raise ConfigEntryNotReady await agent_client.get_devices() hass.data[AGENT_DOMAIN][config_entry.entry_id] = {CONNECTION: agent_client} device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(AGENT_DOMAIN, agent_client.unique)}, manufacturer="iSpyConnect", name=f"Agent {agent_client.name}", model="Agent DVR", sw_version=agent_client.version, ) for forward in FORWARDS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, forward) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, forward) for forward in FORWARDS ] ) ) await hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION].close() if unload_ok: hass.data[AGENT_DOMAIN].pop(config_entry.entry_id) return unload_ok
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/agent_dvr/__init__.py
"""The cert_expiry component.""" from datetime import datetime, timedelta import logging from typing import Optional from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DEFAULT_PORT, DOMAIN from .errors import TemporaryFailure, ValidationFailure from .helper import get_cert_expiry_timestamp _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=12) async def async_setup(hass, config): """Platform setup, do nothing.""" return True async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Load the saved entities.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] coordinator = CertExpiryDataUpdateCoordinator(hass, host, port) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=f"{host}:{port}") hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.config_entries.async_forward_entry_unload(entry, "sensor") class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator[datetime]): """Class to manage fetching Cert Expiry data from single endpoint.""" def __init__(self, hass, host, port): """Initialize global Cert Expiry data updater.""" self.host = host self.port = port self.cert_error = None self.is_cert_valid = False display_port = f":{port}" if port != DEFAULT_PORT else "" name = f"{self.host}{display_port}" super().__init__( hass, _LOGGER, name=name, update_interval=SCAN_INTERVAL, ) async def 
_async_update_data(self) -> Optional[datetime]: """Fetch certificate.""" try: timestamp = await get_cert_expiry_timestamp(self.hass, self.host, self.port) except TemporaryFailure as err: raise UpdateFailed(err.args[0]) from err except ValidationFailure as err: self.cert_error = err self.is_cert_valid = False _LOGGER.error("Certificate validation error: %s [%s]", self.host, err) return None self.cert_error = None self.is_cert_valid = True return timestamp
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/cert_expiry/__init__.py
"""Support for consuming values for the Volkszaehler API.""" from datetime import timedelta import logging from volkszaehler import Volkszaehler from volkszaehler.exceptions import VolkszaehlerApiConnectionError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_PORT, ENERGY_WATT_HOUR, POWER_WATT, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) CONF_UUID = "uuid" DEFAULT_HOST = "localhost" DEFAULT_NAME = "Volkszaehler" DEFAULT_PORT = 80 MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) SENSOR_TYPES = { "average": ["Average", POWER_WATT, "mdi:power-off"], "consumption": ["Consumption", ENERGY_WATT_HOUR, "mdi:power-plug"], "max": ["Max", POWER_WATT, "mdi:arrow-up"], "min": ["Min", POWER_WATT, "mdi:arrow-down"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_UUID): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_MONITORED_CONDITIONS, default=["average"]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Volkszaehler sensors.""" host = config[CONF_HOST] name = config[CONF_NAME] port = config[CONF_PORT] uuid = config[CONF_UUID] conditions = config[CONF_MONITORED_CONDITIONS] session = async_get_clientsession(hass) vz_api = VolkszaehlerData( Volkszaehler(hass.loop, session, uuid, host=host, port=port) ) await vz_api.async_update() if vz_api.api.data is None: raise PlatformNotReady dev = [] for condition in conditions: 
dev.append(VolkszaehlerSensor(vz_api, name, condition)) async_add_entities(dev, True) class VolkszaehlerSensor(Entity): """Implementation of a Volkszaehler sensor.""" def __init__(self, vz_api, name, sensor_type): """Initialize the Volkszaehler sensor.""" self.vz_api = vz_api self._name = name self.type = sensor_type self._state = None @property def name(self): """Return the name of the sensor.""" return "{} {}".format(self._name, SENSOR_TYPES[self.type][0]) @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return SENSOR_TYPES[self.type][1] @property def available(self): """Could the device be accessed during the last update call.""" return self.vz_api.available @property def state(self): """Return the state of the resources.""" return self._state async def async_update(self): """Get the latest data from REST API.""" await self.vz_api.async_update() if self.vz_api.api.data is not None: self._state = round(getattr(self.vz_api.api, self.type), 2) class VolkszaehlerData: """The class for handling the data retrieval from the Volkszaehler API.""" def __init__(self, api): """Initialize the data object.""" self.api = api self.available = True @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the Volkszaehler REST API.""" try: await self.api.get_data() self.available = True except VolkszaehlerApiConnectionError: _LOGGER.error("Unable to fetch data from the Volkszaehler API") self.available = False
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/volkszaehler/sensor.py
"""Device tracker constants.""" from datetime import timedelta import logging LOGGER = logging.getLogger(__package__) DOMAIN = "device_tracker" PLATFORM_TYPE_LEGACY = "legacy" PLATFORM_TYPE_ENTITY = "entity_platform" SOURCE_TYPE_GPS = "gps" SOURCE_TYPE_ROUTER = "router" SOURCE_TYPE_BLUETOOTH = "bluetooth" SOURCE_TYPE_BLUETOOTH_LE = "bluetooth_le" CONF_SCAN_INTERVAL = "interval_seconds" SCAN_INTERVAL = timedelta(seconds=12) CONF_TRACK_NEW = "track_new_devices" DEFAULT_TRACK_NEW = True CONF_CONSIDER_HOME = "consider_home" DEFAULT_CONSIDER_HOME = timedelta(seconds=180) CONF_NEW_DEVICE_DEFAULTS = "new_device_defaults" ATTR_ATTRIBUTES = "attributes" ATTR_BATTERY = "battery" ATTR_DEV_ID = "dev_id" ATTR_GPS = "gps" ATTR_HOST_NAME = "host_name" ATTR_LOCATION_NAME = "location_name" ATTR_MAC = "mac" ATTR_SOURCE_TYPE = "source_type" ATTR_CONSIDER_HOME = "consider_home"
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/device_tracker/const.py
"""The Netatmo data handler.""" from collections import deque from datetime import timedelta from functools import partial from itertools import islice import logging from time import time from typing import Deque, Dict, List import pyatmo from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_time_interval from .const import AUTH, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) CAMERA_DATA_CLASS_NAME = "CameraData" WEATHERSTATION_DATA_CLASS_NAME = "WeatherStationData" HOMECOACH_DATA_CLASS_NAME = "HomeCoachData" HOMEDATA_DATA_CLASS_NAME = "HomeData" HOMESTATUS_DATA_CLASS_NAME = "HomeStatus" PUBLICDATA_DATA_CLASS_NAME = "PublicData" NEXT_SCAN = "next_scan" DATA_CLASSES = { WEATHERSTATION_DATA_CLASS_NAME: pyatmo.WeatherStationData, HOMECOACH_DATA_CLASS_NAME: pyatmo.HomeCoachData, CAMERA_DATA_CLASS_NAME: pyatmo.CameraData, HOMEDATA_DATA_CLASS_NAME: pyatmo.HomeData, HOMESTATUS_DATA_CLASS_NAME: pyatmo.HomeStatus, PUBLICDATA_DATA_CLASS_NAME: pyatmo.PublicData, } BATCH_SIZE = 3 DEFAULT_INTERVALS = { HOMEDATA_DATA_CLASS_NAME: 900, HOMESTATUS_DATA_CLASS_NAME: 300, CAMERA_DATA_CLASS_NAME: 900, WEATHERSTATION_DATA_CLASS_NAME: 600, HOMECOACH_DATA_CLASS_NAME: 300, PUBLICDATA_DATA_CLASS_NAME: 600, } SCAN_INTERVAL = 60 class NetatmoDataHandler: """Manages the Netatmo data handling.""" def __init__(self, hass: HomeAssistant, entry: ConfigEntry): """Initialize self.""" self.hass = hass self._auth = hass.data[DOMAIN][entry.entry_id][AUTH] self.listeners: List[CALLBACK_TYPE] = [] self._data_classes: Dict = {} self.data = {} self._queue: Deque = deque() self._webhook: bool = False async def async_setup(self): """Set up the Netatmo data handler.""" async_track_time_interval( self.hass, self.async_update, timedelta(seconds=SCAN_INTERVAL) ) self.listeners.append( async_dispatcher_connect( self.hass, 
f"signal-{DOMAIN}-webhook-None", self.handle_event, ) ) async def async_update(self, event_time): """ Update device. We do up to BATCH_SIZE calls in one update in order to minimize the calls on the api service. """ for data_class in islice(self._queue, 0, BATCH_SIZE): if data_class[NEXT_SCAN] > time(): continue self._data_classes[data_class["name"]][NEXT_SCAN] = ( time() + data_class["interval"] ) await self.async_fetch_data( data_class["class"], data_class["name"], **data_class["kwargs"] ) self._queue.rotate(BATCH_SIZE) async def async_cleanup(self): """Clean up the Netatmo data handler.""" for listener in self.listeners: listener() async def handle_event(self, event): """Handle webhook events.""" if event["data"]["push_type"] == "webhook_activation": _LOGGER.info("%s webhook successfully registered", MANUFACTURER) self._webhook = True elif event["data"]["push_type"] == "NACamera-connection": _LOGGER.debug("%s camera reconnected", MANUFACTURER) self._data_classes[CAMERA_DATA_CLASS_NAME][NEXT_SCAN] = time() async def async_fetch_data(self, data_class, data_class_entry, **kwargs): """Fetch data and notify.""" try: self.data[data_class_entry] = await self.hass.async_add_executor_job( partial(data_class, **kwargs), self._auth, ) for update_callback in self._data_classes[data_class_entry][ "subscriptions" ]: if update_callback: update_callback() except (pyatmo.NoDevice, pyatmo.ApiError) as err: _LOGGER.debug(err) async def register_data_class( self, data_class_name, data_class_entry, update_callback, **kwargs ): """Register data class.""" if data_class_entry in self._data_classes: self._data_classes[data_class_entry]["subscriptions"].append( update_callback ) return self._data_classes[data_class_entry] = { "class": DATA_CLASSES[data_class_name], "name": data_class_entry, "interval": DEFAULT_INTERVALS[data_class_name], NEXT_SCAN: time() + DEFAULT_INTERVALS[data_class_name], "kwargs": kwargs, "subscriptions": [update_callback], } await self.async_fetch_data( 
DATA_CLASSES[data_class_name], data_class_entry, **kwargs ) self._queue.append(self._data_classes[data_class_entry]) _LOGGER.debug("Data class %s added", data_class_entry) async def unregister_data_class(self, data_class_entry, update_callback): """Unregister data class.""" if update_callback not in self._data_classes[data_class_entry]["subscriptions"]: return self._data_classes[data_class_entry]["subscriptions"].remove(update_callback) if not self._data_classes[data_class_entry].get("subscriptions"): self._queue.remove(self._data_classes[data_class_entry]) self._data_classes.pop(data_class_entry) _LOGGER.debug("Data class %s removed", data_class_entry) @property def webhook(self) -> bool: """Return the webhook state.""" return self._webhook
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/netatmo/data_handler.py
"""Support for Enviro pHAT sensors.""" from datetime import timedelta import importlib import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_DISPLAY_OPTIONS, CONF_NAME, PRESSURE_HPA, TEMP_CELSIUS, VOLT, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "envirophat" CONF_USE_LEDS = "use_leds" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) SENSOR_TYPES = { "light": ["light", " ", "mdi:weather-sunny"], "light_red": ["light_red", " ", "mdi:invert-colors"], "light_green": ["light_green", " ", "mdi:invert-colors"], "light_blue": ["light_blue", " ", "mdi:invert-colors"], "accelerometer_x": ["accelerometer_x", "G", "mdi:earth"], "accelerometer_y": ["accelerometer_y", "G", "mdi:earth"], "accelerometer_z": ["accelerometer_z", "G", "mdi:earth"], "magnetometer_x": ["magnetometer_x", " ", "mdi:magnet"], "magnetometer_y": ["magnetometer_y", " ", "mdi:magnet"], "magnetometer_z": ["magnetometer_z", " ", "mdi:magnet"], "temperature": ["temperature", TEMP_CELSIUS, "mdi:thermometer"], "pressure": ["pressure", PRESSURE_HPA, "mdi:gauge"], "voltage_0": ["voltage_0", VOLT, "mdi:flash"], "voltage_1": ["voltage_1", VOLT, "mdi:flash"], "voltage_2": ["voltage_2", VOLT, "mdi:flash"], "voltage_3": ["voltage_3", VOLT, "mdi:flash"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DISPLAY_OPTIONS, default=list(SENSOR_TYPES)): [ vol.In(SENSOR_TYPES) ], vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_USE_LEDS, default=False): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Sense HAT sensor platform.""" try: envirophat = importlib.import_module("envirophat") except OSError: _LOGGER.error("No Enviro pHAT was found") return False data = EnvirophatData(envirophat, 
config.get(CONF_USE_LEDS)) dev = [] for variable in config[CONF_DISPLAY_OPTIONS]: dev.append(EnvirophatSensor(data, variable)) add_entities(dev, True) class EnvirophatSensor(Entity): """Representation of an Enviro pHAT sensor.""" def __init__(self, data, sensor_types): """Initialize the sensor.""" self.data = data self._name = SENSOR_TYPES[sensor_types][0] self._unit_of_measurement = SENSOR_TYPES[sensor_types][1] self.type = sensor_types self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement def update(self): """Get the latest data and updates the states.""" self.data.update() if self.type == "light": self._state = self.data.light if self.type == "light_red": self._state = self.data.light_red if self.type == "light_green": self._state = self.data.light_green if self.type == "light_blue": self._state = self.data.light_blue if self.type == "accelerometer_x": self._state = self.data.accelerometer_x if self.type == "accelerometer_y": self._state = self.data.accelerometer_y if self.type == "accelerometer_z": self._state = self.data.accelerometer_z if self.type == "magnetometer_x": self._state = self.data.magnetometer_x if self.type == "magnetometer_y": self._state = self.data.magnetometer_y if self.type == "magnetometer_z": self._state = self.data.magnetometer_z if self.type == "temperature": self._state = self.data.temperature if self.type == "pressure": self._state = self.data.pressure if self.type == "voltage_0": self._state = self.data.voltage_0 if self.type == "voltage_1": self._state = self.data.voltage_1 if self.type == "voltage_2": self._state = self.data.voltage_2 if self.type == "voltage_3": self._state = 
self.data.voltage_3 class EnvirophatData: """Get the latest data and update.""" def __init__(self, envirophat, use_leds): """Initialize the data object.""" self.envirophat = envirophat self.use_leds = use_leds # sensors readings self.light = None self.light_red = None self.light_green = None self.light_blue = None self.accelerometer_x = None self.accelerometer_y = None self.accelerometer_z = None self.magnetometer_x = None self.magnetometer_y = None self.magnetometer_z = None self.temperature = None self.pressure = None self.voltage_0 = None self.voltage_1 = None self.voltage_2 = None self.voltage_3 = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Enviro pHAT.""" # Light sensor reading: 16-bit integer self.light = self.envirophat.light.light() if self.use_leds: self.envirophat.leds.on() # the three color values scaled against the overall light, 0-255 self.light_red, self.light_green, self.light_blue = self.envirophat.light.rgb() if self.use_leds: self.envirophat.leds.off() # accelerometer readings in G ( self.accelerometer_x, self.accelerometer_y, self.accelerometer_z, ) = self.envirophat.motion.accelerometer() # raw magnetometer reading ( self.magnetometer_x, self.magnetometer_y, self.magnetometer_z, ) = self.envirophat.motion.magnetometer() # temperature resolution of BMP280 sensor: 0.01°C self.temperature = round(self.envirophat.weather.temperature(), 2) # pressure resolution of BMP280 sensor: 0.16 Pa, rounding to 0.1 Pa # with conversion to 100 Pa = 1 hPa self.pressure = round(self.envirophat.weather.pressure() / 100.0, 3) # Voltage sensor, reading between 0-3.3V ( self.voltage_0, self.voltage_1, self.voltage_2, self.voltage_3, ) = self.envirophat.analog.read_all()
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/envirophat/sensor.py
"""Switch platform integration for Numato USB GPIO expanders.""" import logging from numato_gpio import NumatoGpioError from homeassistant.const import ( CONF_DEVICES, CONF_ID, CONF_SWITCHES, DEVICE_DEFAULT_NAME, ) from homeassistant.helpers.entity import ToggleEntity from . import CONF_INVERT_LOGIC, CONF_PORTS, DATA_API, DOMAIN _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the configured Numato USB GPIO switch ports.""" if discovery_info is None: return api = hass.data[DOMAIN][DATA_API] switches = [] devices = hass.data[DOMAIN][CONF_DEVICES] for device in [d for d in devices if CONF_SWITCHES in d]: device_id = device[CONF_ID] platform = device[CONF_SWITCHES] invert_logic = platform[CONF_INVERT_LOGIC] ports = platform[CONF_PORTS] for port, port_name in ports.items(): try: api.setup_output(device_id, port) api.write_output(device_id, port, 1 if invert_logic else 0) except NumatoGpioError as err: _LOGGER.error( "Failed to initialize switch '%s' on Numato device %s port %s: %s", port_name, device_id, port, err, ) continue switches.append( NumatoGpioSwitch( port_name, device_id, port, invert_logic, api, ) ) add_entities(switches, True) class NumatoGpioSwitch(ToggleEntity): """Representation of a Numato USB GPIO switch port.""" def __init__(self, name, device_id, port, invert_logic, api): """Initialize the port.""" self._name = name or DEVICE_DEFAULT_NAME self._device_id = device_id self._port = port self._invert_logic = invert_logic self._state = False self._api = api @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if port is turned on.""" return self._state def turn_on(self, **kwargs): """Turn the port on.""" try: self._api.write_output( self._device_id, self._port, 0 if self._invert_logic else 1 ) self._state = True self.schedule_update_ha_state() except 
NumatoGpioError as err: _LOGGER.error( "Failed to turn on Numato device %s port %s: %s", self._device_id, self._port, err, ) def turn_off(self, **kwargs): """Turn the port off.""" try: self._api.write_output( self._device_id, self._port, 1 if self._invert_logic else 0 ) self._state = False self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( "Failed to turn off Numato device %s port %s: %s", self._device_id, self._port, err, )
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/numato/switch.py
"""Tracking for bluetooth devices.""" import asyncio import logging from typing import List, Optional, Set, Tuple # pylint: disable=import-error import bluetooth from bt_proximity import BluetoothRSSI import voluptuous as vol from homeassistant.components.device_tracker import PLATFORM_SCHEMA from homeassistant.components.device_tracker.const import ( CONF_SCAN_INTERVAL, CONF_TRACK_NEW, DEFAULT_TRACK_NEW, SCAN_INTERVAL, SOURCE_TYPE_BLUETOOTH, ) from homeassistant.components.device_tracker.legacy import ( YAML_DEVICES, async_load_config, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import HomeAssistantType from .const import DOMAIN, SERVICE_UPDATE _LOGGER = logging.getLogger(__name__) BT_PREFIX = "BT_" CONF_REQUEST_RSSI = "request_rssi" CONF_DEVICE_ID = "device_id" DEFAULT_DEVICE_ID = -1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_TRACK_NEW): cv.boolean, vol.Optional(CONF_REQUEST_RSSI): cv.boolean, vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All( vol.Coerce(int), vol.Range(min=-1) ), } ) def is_bluetooth_device(device) -> bool: """Check whether a device is a bluetooth device by its mac.""" return device.mac and device.mac[:3].upper() == BT_PREFIX def discover_devices(device_id: int) -> List[Tuple[str, str]]: """Discover Bluetooth devices.""" result = bluetooth.discover_devices( duration=8, lookup_names=True, flush_cache=True, lookup_class=False, device_id=device_id, ) _LOGGER.debug("Bluetooth devices discovered = %d", len(result)) return result async def see_device( hass: HomeAssistantType, async_see, mac: str, device_name: str, rssi=None ) -> None: """Mark a device as seen.""" attributes = {} if rssi is not None: attributes["rssi"] = rssi await async_see( mac=f"{BT_PREFIX}{mac}", host_name=device_name, attributes=attributes, source_type=SOURCE_TYPE_BLUETOOTH, ) async def get_tracking_devices(hass: HomeAssistantType) -> 
Tuple[Set[str], Set[str]]: """ Load all known devices. We just need the devices so set consider_home and home range to 0 """ yaml_path: str = hass.config.path(YAML_DEVICES) devices = await async_load_config(yaml_path, hass, 0) bluetooth_devices = [device for device in devices if is_bluetooth_device(device)] devices_to_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if device.track } devices_to_not_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if not device.track } return devices_to_track, devices_to_not_track def lookup_name(mac: str) -> Optional[str]: """Lookup a Bluetooth device name.""" _LOGGER.debug("Scanning %s", mac) return bluetooth.lookup_name(mac, timeout=5) async def async_setup_scanner( hass: HomeAssistantType, config: dict, async_see, discovery_info=None ): """Set up the Bluetooth Scanner.""" device_id: int = config[CONF_DEVICE_ID] interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) request_rssi = config.get(CONF_REQUEST_RSSI, False) update_bluetooth_lock = asyncio.Lock() # If track new devices is true discover new devices on startup. 
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW) _LOGGER.debug("Tracking new devices is set to %s", track_new) devices_to_track, devices_to_not_track = await get_tracking_devices(hass) if not devices_to_track and not track_new: _LOGGER.debug("No Bluetooth devices to track and not tracking new devices") if request_rssi: _LOGGER.debug("Detecting RSSI for devices") async def perform_bluetooth_update(): """Discover Bluetooth devices and update status.""" _LOGGER.debug("Performing Bluetooth devices discovery and update") tasks = [] try: if track_new: devices = await hass.async_add_executor_job(discover_devices, device_id) for mac, device_name in devices: if mac not in devices_to_track and mac not in devices_to_not_track: devices_to_track.add(mac) for mac in devices_to_track: device_name = await hass.async_add_executor_job(lookup_name, mac) if device_name is None: # Could not lookup device name continue rssi = None if request_rssi: client = BluetoothRSSI(mac) rssi = await hass.async_add_executor_job(client.request_rssi) client.close() tasks.append(see_device(hass, async_see, mac, device_name, rssi)) if tasks: await asyncio.wait(tasks) except bluetooth.BluetoothError: _LOGGER.exception("Error looking up Bluetooth device") async def update_bluetooth(now=None): """Lookup Bluetooth devices and update status.""" # If an update is in progress, we don't do anything if update_bluetooth_lock.locked(): _LOGGER.debug( "Previous execution of update_bluetooth is taking longer than the scheduled update of interval %s", interval, ) return async with update_bluetooth_lock: await perform_bluetooth_update() async def handle_manual_update_bluetooth(call): """Update bluetooth devices on demand.""" await update_bluetooth() hass.async_create_task(update_bluetooth()) async_track_time_interval(hass, update_bluetooth, interval) hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth) return True
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/bluetooth_tracker/device_tracker.py
"""Config flow for DialogFlow.""" from homeassistant.helpers import config_entry_flow from .const import DOMAIN config_entry_flow.register_webhook_flow( DOMAIN, "Dialogflow Webhook", { "dialogflow_url": "https://dialogflow.com/docs/fulfillment#webhook", "docs_url": "https://www.home-assistant.io/integrations/dialogflow/", }, )
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/dialogflow/config_flow.py
"""Support for the DirecTV receivers.""" import logging from typing import Callable, List, Optional from directv import DIRECTV from homeassistant.components.media_player import ( DEVICE_CLASS_RECEIVER, MediaPlayerEntity, ) from homeassistant.components.media_player.const import ( MEDIA_TYPE_CHANNEL, MEDIA_TYPE_MOVIE, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util import dt as dt_util from . import DIRECTVEntity from .const import ( ATTR_MEDIA_CURRENTLY_RECORDING, ATTR_MEDIA_RATING, ATTR_MEDIA_RECORDED, ATTR_MEDIA_START_TIME, DOMAIN, ) _LOGGER = logging.getLogger(__name__) KNOWN_MEDIA_TYPES = [MEDIA_TYPE_MOVIE, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW] SUPPORT_DTV = ( SUPPORT_PAUSE | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK | SUPPORT_PLAY ) SUPPORT_DTV_CLIENT = ( SUPPORT_PAUSE | SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK | SUPPORT_PLAY ) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities: Callable[[List, bool], None], ) -> bool: """Set up the DirecTV config entry.""" dtv = hass.data[DOMAIN][entry.entry_id] entities = [] for location in dtv.device.locations: entities.append( DIRECTVMediaPlayer( dtv=dtv, name=str.title(location.name), address=location.address, ) ) async_add_entities(entities, True) class DIRECTVMediaPlayer(DIRECTVEntity, MediaPlayerEntity): """Representation of a DirecTV receiver on the network.""" def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None: """Initialize DirecTV media player.""" super().__init__( dtv=dtv, name=name, address=address, ) 
self._assumed_state = None self._available = False self._is_recorded = None self._is_standby = True self._last_position = None self._last_update = None self._paused = None self._program = None self._state = None async def async_update(self): """Retrieve latest state.""" self._state = await self.dtv.state(self._address) self._available = self._state.available self._is_standby = self._state.standby self._program = self._state.program if self._is_standby: self._assumed_state = False self._is_recorded = None self._last_position = None self._last_update = None self._paused = None elif self._program is not None: self._paused = self._last_position == self._program.position self._is_recorded = self._program.recorded self._last_position = self._program.position self._last_update = self._state.at self._assumed_state = self._is_recorded @property def device_state_attributes(self): """Return device specific state attributes.""" if self._is_standby: return {} return { ATTR_MEDIA_CURRENTLY_RECORDING: self.media_currently_recording, ATTR_MEDIA_RATING: self.media_rating, ATTR_MEDIA_RECORDED: self.media_recorded, ATTR_MEDIA_START_TIME: self.media_start_time, } @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self) -> Optional[str]: """Return the class of this device.""" return DEVICE_CLASS_RECEIVER @property def unique_id(self): """Return a unique ID to use for this media player.""" if self._address == "0": return self.dtv.device.info.receiver_id return self._address # MediaPlayerEntity properties and methods @property def state(self): """Return the state of the device.""" if self._is_standby: return STATE_OFF # For recorded media we can determine if it is paused or not. # For live media we're unable to determine and will always return # playing instead. 
if self._paused: return STATE_PAUSED return STATE_PLAYING @property def available(self): """Return if able to retrieve information from DVR or not.""" return self._available @property def assumed_state(self): """Return if we assume the state or not.""" return self._assumed_state @property def media_content_id(self): """Return the content ID of current playing media.""" if self._is_standby or self._program is None: return None return self._program.program_id @property def media_content_type(self): """Return the content type of current playing media.""" if self._is_standby or self._program is None: return None if self._program.program_type in KNOWN_MEDIA_TYPES: return self._program.program_type return MEDIA_TYPE_MOVIE @property def media_duration(self): """Return the duration of current playing media in seconds.""" if self._is_standby or self._program is None: return None return self._program.duration @property def media_position(self): """Position of current playing media in seconds.""" if self._is_standby: return None return self._last_position @property def media_position_updated_at(self): """When was the position of the current playing media valid.""" if self._is_standby: return None return self._last_update @property def media_title(self): """Return the title of current playing media.""" if self._is_standby or self._program is None: return None if self.media_content_type == MEDIA_TYPE_MUSIC: return self._program.music_title return self._program.title @property def media_artist(self): """Artist of current playing media, music track only.""" if self._is_standby or self._program is None: return None return self._program.music_artist @property def media_album_name(self): """Album name of current playing media, music track only.""" if self._is_standby or self._program is None: return None return self._program.music_album @property def media_series_title(self): """Return the title of current episode of TV show.""" if self._is_standby or self._program is None: return 
None return self._program.episode_title @property def media_channel(self): """Return the channel current playing media.""" if self._is_standby or self._program is None: return None return f"{self._program.channel_name} ({self._program.channel})" @property def source(self): """Name of the current input source.""" if self._is_standby or self._program is None: return None return self._program.channel @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_DTV_CLIENT if self._is_client else SUPPORT_DTV @property def media_currently_recording(self): """If the media is currently being recorded or not.""" if self._is_standby or self._program is None: return None return self._program.recording @property def media_rating(self): """TV Rating of the current playing media.""" if self._is_standby or self._program is None: return None return self._program.rating @property def media_recorded(self): """If the media was recorded or live.""" if self._is_standby: return None return self._is_recorded @property def media_start_time(self): """Start time the program aired.""" if self._is_standby or self._program is None: return None return dt_util.as_local(self._program.start_time) async def async_turn_on(self): """Turn on the receiver.""" if self._is_client: raise NotImplementedError() _LOGGER.debug("Turn on %s", self._name) await self.dtv.remote("poweron", self._address) async def async_turn_off(self): """Turn off the receiver.""" if self._is_client: raise NotImplementedError() _LOGGER.debug("Turn off %s", self._name) await self.dtv.remote("poweroff", self._address) async def async_media_play(self): """Send play command.""" _LOGGER.debug("Play on %s", self._name) await self.dtv.remote("play", self._address) async def async_media_pause(self): """Send pause command.""" _LOGGER.debug("Pause on %s", self._name) await self.dtv.remote("pause", self._address) async def async_media_stop(self): """Send stop command.""" _LOGGER.debug("Stop on 
%s", self._name) await self.dtv.remote("stop", self._address) async def async_media_previous_track(self): """Send rewind command.""" _LOGGER.debug("Rewind on %s", self._name) await self.dtv.remote("rew", self._address) async def async_media_next_track(self): """Send fast forward command.""" _LOGGER.debug("Fast forward on %s", self._name) await self.dtv.remote("ffwd", self._address) async def async_play_media(self, media_type, media_id, **kwargs): """Select input source.""" if media_type != MEDIA_TYPE_CHANNEL: _LOGGER.error( "Invalid media type %s. Only %s is supported", media_type, MEDIA_TYPE_CHANNEL, ) return _LOGGER.debug("Changing channel on %s to %s", self._name, media_id) await self.dtv.tune(media_id, self._address)
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/directv/media_player.py
"""Component for interacting with the Yale Smart Alarm System API.""" import logging import voluptuous as vol from yalesmartalarmclient.client import ( YALE_STATE_ARM_FULL, YALE_STATE_ARM_PARTIAL, YALE_STATE_DISARM, AuthenticationError, YaleSmartAlarmClient, ) from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA, AlarmControlPanelEntity, ) from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, ) from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, ) import homeassistant.helpers.config_validation as cv CONF_AREA_ID = "area_id" DEFAULT_NAME = "Yale Smart Alarm" DEFAULT_AREA_ID = "1" _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_AREA_ID, default=DEFAULT_AREA_ID): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the alarm platform.""" name = config[CONF_NAME] username = config[CONF_USERNAME] password = config[CONF_PASSWORD] area_id = config[CONF_AREA_ID] try: client = YaleSmartAlarmClient(username, password, area_id) except AuthenticationError: _LOGGER.error("Authentication failed. 
Check credentials") return add_entities([YaleAlarmDevice(name, client)], True) class YaleAlarmDevice(AlarmControlPanelEntity): """Represent a Yale Smart Alarm.""" def __init__(self, name, client): """Initialize the Yale Alarm Device.""" self._name = name self._client = client self._state = None self._state_map = { YALE_STATE_DISARM: STATE_ALARM_DISARMED, YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME, YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY, } @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self) -> int: """Return the list of supported features.""" return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY def update(self): """Return the state of the device.""" armed_status = self._client.get_armed_status() self._state = self._state_map.get(armed_status) def alarm_disarm(self, code=None): """Send disarm command.""" self._client.disarm() def alarm_arm_home(self, code=None): """Send arm home command.""" self._client.arm_partial() def alarm_arm_away(self, code=None): """Send arm away command.""" self._client.arm_full()
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/yale_smart_alarm/alarm_control_panel.py
"""Support for LCN climate control.""" import pypck from homeassistant.components.climate import ClimateEntity, const from homeassistant.const import ATTR_TEMPERATURE, CONF_ADDRESS, CONF_UNIT_OF_MEASUREMENT from . import LcnDevice from .const import ( CONF_CONNECTIONS, CONF_LOCKABLE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_SETPOINT, CONF_SOURCE, DATA_LCN, ) from .helpers import get_connection async def async_setup_platform( hass, hass_config, async_add_entities, discovery_info=None ): """Set up the LCN climate platform.""" if discovery_info is None: return devices = [] for config in discovery_info: address, connection_id = config[CONF_ADDRESS] addr = pypck.lcn_addr.LcnAddr(*address) connections = hass.data[DATA_LCN][CONF_CONNECTIONS] connection = get_connection(connections, connection_id) address_connection = connection.get_address_conn(addr) devices.append(LcnClimate(config, address_connection)) async_add_entities(devices) class LcnClimate(LcnDevice, ClimateEntity): """Representation of a LCN climate device.""" def __init__(self, config, address_connection): """Initialize of a LCN climate device.""" super().__init__(config, address_connection) self.variable = pypck.lcn_defs.Var[config[CONF_SOURCE]] self.setpoint = pypck.lcn_defs.Var[config[CONF_SETPOINT]] self.unit = pypck.lcn_defs.VarUnit.parse(config[CONF_UNIT_OF_MEASUREMENT]) self.regulator_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint) self.is_lockable = config[CONF_LOCKABLE] self._max_temp = config[CONF_MAX_TEMP] self._min_temp = config[CONF_MIN_TEMP] self._current_temperature = None self._target_temperature = None self._is_on = None async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.variable) await self.address_connection.activate_status_request_handler(self.setpoint) @property def supported_features(self): """Return the list of supported features.""" return 
const.SUPPORT_TARGET_TEMPERATURE @property def temperature_unit(self): """Return the unit of measurement.""" return self.unit.value @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature @property def hvac_mode(self): """Return hvac operation ie. heat, cool mode. Need to be one of HVAC_MODE_*. """ if self._is_on: return const.HVAC_MODE_HEAT return const.HVAC_MODE_OFF @property def hvac_modes(self): """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES. """ modes = [const.HVAC_MODE_HEAT] if self.is_lockable: modes.append(const.HVAC_MODE_OFF) return modes @property def max_temp(self): """Return the maximum temperature.""" return self._max_temp @property def min_temp(self): """Return the minimum temperature.""" return self._min_temp async def async_set_hvac_mode(self, hvac_mode): """Set new target hvac mode.""" if hvac_mode == const.HVAC_MODE_HEAT: self._is_on = True self.address_connection.lock_regulator(self.regulator_id, False) elif hvac_mode == const.HVAC_MODE_OFF: self._is_on = False self.address_connection.lock_regulator(self.regulator_id, True) self._target_temperature = None self.async_write_ha_state() async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return self._target_temperature = temperature self.address_connection.var_abs( self.setpoint, self._target_temperature, self.unit ) self.async_write_ha_state() def input_received(self, input_obj): """Set temperature value when LCN input object is received.""" if not isinstance(input_obj, pypck.inputs.ModStatusVar): return if input_obj.get_var() == self.variable: self._current_temperature = input_obj.get_value().to_var_unit(self.unit) elif input_obj.get_var() == self.setpoint: self._is_on = not 
input_obj.get_value().is_locked_regulator() if self._is_on: self._target_temperature = input_obj.get_value().to_var_unit(self.unit) self.async_write_ha_state()
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/lcn/climate.py
"""Support for Apple TV media player.""" import logging import pyatv.const as atv_const from homeassistant.components.media_player import MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, ) from homeassistant.core import callback import homeassistant.util.dt as dt_util from . import ATTR_ATV, ATTR_POWER, DATA_APPLE_TV, DATA_ENTITIES _LOGGER = logging.getLogger(__name__) SUPPORT_APPLE_TV = ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_SEEK | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Apple TV platform.""" if not discovery_info: return # Manage entity cache for service handler if DATA_ENTITIES not in hass.data: hass.data[DATA_ENTITIES] = [] name = discovery_info[CONF_NAME] host = discovery_info[CONF_HOST] atv = hass.data[DATA_APPLE_TV][host][ATTR_ATV] power = hass.data[DATA_APPLE_TV][host][ATTR_POWER] entity = AppleTvDevice(atv, name, power) @callback def on_hass_stop(event): """Stop push updates when hass stops.""" atv.push_updater.stop() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) if entity not in hass.data[DATA_ENTITIES]: hass.data[DATA_ENTITIES].append(entity) async_add_entities([entity]) class AppleTvDevice(MediaPlayerEntity): """Representation of an Apple TV device.""" def __init__(self, atv, name, power): """Initialize the Apple TV device.""" self.atv = atv self._name = name self._playing = None self._power = power self._power.listeners.append(self) self.atv.push_updater.listener = self 
async def async_added_to_hass(self): """Handle when an entity is about to be added to Home Assistant.""" self._power.init() @property def name(self): """Return the name of the device.""" return self._name @property def unique_id(self): """Return a unique ID.""" return self.atv.metadata.device_id @property def should_poll(self): """No polling needed.""" return False @property def state(self): """Return the state of the device.""" if not self._power.turned_on: return STATE_OFF if self._playing: state = self._playing.play_state if state in ( atv_const.PLAY_STATE_IDLE, atv_const.PLAY_STATE_NO_MEDIA, atv_const.PLAY_STATE_LOADING, ): return STATE_IDLE if state == atv_const.PLAY_STATE_PLAYING: return STATE_PLAYING if state in ( atv_const.PLAY_STATE_PAUSED, atv_const.PLAY_STATE_FAST_FORWARD, atv_const.PLAY_STATE_FAST_BACKWARD, atv_const.PLAY_STATE_STOPPED, ): # Catch fast forward/backward here so "play" is default action return STATE_PAUSED return STATE_STANDBY # Bad or unknown state? @callback def playstatus_update(self, updater, playing): """Print what is currently playing when it changes.""" self._playing = playing self.async_write_ha_state() @callback def playstatus_error(self, updater, exception): """Inform about an error and restart push updates.""" _LOGGER.warning("A %s error occurred: %s", exception.__class__, exception) # This will wait 10 seconds before restarting push updates. If the # connection continues to fail, it will flood the log (every 10 # seconds) until it succeeds. A better approach should probably be # implemented here later. 
updater.start(initial_delay=10) self._playing = None self.async_write_ha_state() @property def media_content_type(self): """Content type of current playing media.""" if self._playing: media_type = self._playing.media_type if media_type == atv_const.MEDIA_TYPE_VIDEO: return MEDIA_TYPE_VIDEO if media_type == atv_const.MEDIA_TYPE_MUSIC: return MEDIA_TYPE_MUSIC if media_type == atv_const.MEDIA_TYPE_TV: return MEDIA_TYPE_TVSHOW @property def media_duration(self): """Duration of current playing media in seconds.""" if self._playing: return self._playing.total_time @property def media_position(self): """Position of current playing media in seconds.""" if self._playing: return self._playing.position @property def media_position_updated_at(self): """Last valid time of media position.""" state = self.state if state in (STATE_PLAYING, STATE_PAUSED): return dt_util.utcnow() async def async_play_media(self, media_type, media_id, **kwargs): """Send the play_media command to the media player.""" await self.atv.airplay.play_url(media_id) @property def media_image_hash(self): """Hash value for media image.""" state = self.state if self._playing and state not in [STATE_OFF, STATE_IDLE]: return self._playing.hash async def async_get_media_image(self): """Fetch media image of current playing image.""" state = self.state if self._playing and state not in [STATE_OFF, STATE_IDLE]: return (await self.atv.metadata.artwork()), "image/png" return None, None @property def media_title(self): """Title of current playing media.""" if self._playing: if self.state == STATE_IDLE: return "Nothing playing" title = self._playing.title return title if title else "No title" return f"Establishing a connection to {self._name}..." 
@property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_APPLE_TV async def async_turn_on(self): """Turn the media player on.""" self._power.set_power_on(True) async def async_turn_off(self): """Turn the media player off.""" self._playing = None self._power.set_power_on(False) async def async_media_play_pause(self): """Pause media on media player.""" if not self._playing: return state = self.state if state == STATE_PAUSED: await self.atv.remote_control.play() elif state == STATE_PLAYING: await self.atv.remote_control.pause() async def async_media_play(self): """Play media.""" if self._playing: await self.atv.remote_control.play() async def async_media_stop(self): """Stop the media player.""" if self._playing: await self.atv.remote_control.stop() async def async_media_pause(self): """Pause the media player.""" if self._playing: await self.atv.remote_control.pause() async def async_media_next_track(self): """Send next track command.""" if self._playing: await self.atv.remote_control.next() async def async_media_previous_track(self): """Send previous track command.""" if self._playing: await self.atv.remote_control.previous() async def async_media_seek(self, position): """Send seek command.""" if self._playing: await self.atv.remote_control.set_position(position)
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/apple_tv/media_player.py
"""Support for Sonarr sensors.""" from datetime import timedelta import logging from typing import Any, Callable, Dict, List, Optional from sonarr import Sonarr, SonarrConnectionError, SonarrError from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_GIGABYTES from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from . import SonarrEntity from .const import CONF_UPCOMING_DAYS, CONF_WANTED_MAX_ITEMS, DATA_SONARR, DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up Sonarr sensors based on a config entry.""" options = entry.options sonarr = hass.data[DOMAIN][entry.entry_id][DATA_SONARR] entities = [ SonarrCommandsSensor(sonarr, entry.entry_id), SonarrDiskspaceSensor(sonarr, entry.entry_id), SonarrQueueSensor(sonarr, entry.entry_id), SonarrSeriesSensor(sonarr, entry.entry_id), SonarrUpcomingSensor(sonarr, entry.entry_id, days=options[CONF_UPCOMING_DAYS]), SonarrWantedSensor( sonarr, entry.entry_id, max_items=options[CONF_WANTED_MAX_ITEMS] ), ] async_add_entities(entities, True) def sonarr_exception_handler(func): """Decorate Sonarr calls to handle Sonarr exceptions. A decorator that wraps the passed in function, catches Sonarr errors, and handles the availability of the entity. 
""" async def handler(self, *args, **kwargs): try: await func(self, *args, **kwargs) self.last_update_success = True except SonarrConnectionError as error: if self.available: _LOGGER.error("Error communicating with API: %s", error) self.last_update_success = False except SonarrError as error: if self.available: _LOGGER.error("Invalid response from API: %s", error) self.last_update_success = False return handler class SonarrSensor(SonarrEntity): """Implementation of the Sonarr sensor.""" def __init__( self, *, sonarr: Sonarr, entry_id: str, enabled_default: bool = True, icon: str, key: str, name: str, unit_of_measurement: Optional[str] = None, ) -> None: """Initialize Sonarr sensor.""" self._unit_of_measurement = unit_of_measurement self._key = key self._unique_id = f"{entry_id}_{key}" self.last_update_success = False super().__init__( sonarr=sonarr, entry_id=entry_id, device_id=entry_id, name=name, icon=icon, enabled_default=enabled_default, ) @property def unique_id(self) -> str: """Return the unique ID for this sensor.""" return self._unique_id @property def available(self) -> bool: """Return sensor availability.""" return self.last_update_success @property def unit_of_measurement(self) -> str: """Return the unit this state is expressed in.""" return self._unit_of_measurement class SonarrCommandsSensor(SonarrSensor): """Defines a Sonarr Commands sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Commands sensor.""" self._commands = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:code-braces", key="commands", name=f"{sonarr.app.info.app_name} Commands", unit_of_measurement="Commands", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._commands = await self.sonarr.commands() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for command in self._commands: 
attrs[command.name] = command.state return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._commands) class SonarrDiskspaceSensor(SonarrSensor): """Defines a Sonarr Disk Space sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Disk Space sensor.""" self._disks = [] self._total_free = 0 super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:harddisk", key="diskspace", name=f"{sonarr.app.info.app_name} Disk Space", unit_of_measurement=DATA_GIGABYTES, enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" app = await self.sonarr.update() self._disks = app.disks self._total_free = sum([disk.free for disk in self._disks]) @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for disk in self._disks: free = disk.free / 1024 ** 3 total = disk.total / 1024 ** 3 usage = free / total * 100 attrs[ disk.path ] = f"{free:.2f}/{total:.2f}{self._unit_of_measurement} ({usage:.2f}%)" return attrs @property def state(self) -> str: """Return the state of the sensor.""" free = self._total_free / 1024 ** 3 return f"{free:.2f}" class SonarrQueueSensor(SonarrSensor): """Defines a Sonarr Queue sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Queue sensor.""" self._queue = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:download", key="queue", name=f"{sonarr.app.info.app_name} Queue", unit_of_measurement="Episodes", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._queue = await self.sonarr.queue() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for item in self._queue: remaining = 1 if item.size == 0 else item.size_remaining / item.size remaining_pct = 100 * 
(1 - remaining) name = f"{item.episode.series.title} {item.episode.identifier}" attrs[name] = f"{remaining_pct:.2f}%" return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._queue) class SonarrSeriesSensor(SonarrSensor): """Defines a Sonarr Series sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str) -> None: """Initialize Sonarr Series sensor.""" self._items = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="series", name=f"{sonarr.app.info.app_name} Shows", unit_of_measurement="Series", enabled_default=False, ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._items = await self.sonarr.series() @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for item in self._items: attrs[item.series.title] = f"{item.downloaded}/{item.episodes} Episodes" return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._items) class SonarrUpcomingSensor(SonarrSensor): """Defines a Sonarr Upcoming sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str, days: int = 1) -> None: """Initialize Sonarr Upcoming sensor.""" self._days = days self._upcoming = [] super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="upcoming", name=f"{sonarr.app.info.app_name} Upcoming", unit_of_measurement="Episodes", ) async def async_added_to_hass(self): """Listen for signals.""" await super().async_added_to_hass() self.async_on_remove( async_dispatcher_connect( self.hass, f"sonarr.{self._entry_id}.entry_options_update", self.async_update_entry_options, ) ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" local = dt_util.start_of_local_day().replace(microsecond=0) start = dt_util.as_utc(local) end = start + timedelta(days=self._days) self._upcoming = await self.sonarr.calendar( 
start=start.isoformat(), end=end.isoformat() ) async def async_update_entry_options(self, options: dict) -> None: """Update sensor settings when config entry options are update.""" self._days = options[CONF_UPCOMING_DAYS] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} for episode in self._upcoming: attrs[episode.series.title] = episode.identifier return attrs @property def state(self) -> int: """Return the state of the sensor.""" return len(self._upcoming) class SonarrWantedSensor(SonarrSensor): """Defines a Sonarr Wanted sensor.""" def __init__(self, sonarr: Sonarr, entry_id: str, max_items: int = 10) -> None: """Initialize Sonarr Wanted sensor.""" self._max_items = max_items self._results = None self._total: Optional[int] = None super().__init__( sonarr=sonarr, entry_id=entry_id, icon="mdi:television", key="wanted", name=f"{sonarr.app.info.app_name} Wanted", unit_of_measurement="Episodes", enabled_default=False, ) async def async_added_to_hass(self): """Listen for signals.""" await super().async_added_to_hass() self.async_on_remove( async_dispatcher_connect( self.hass, f"sonarr.{self._entry_id}.entry_options_update", self.async_update_entry_options, ) ) @sonarr_exception_handler async def async_update(self) -> None: """Update entity.""" self._results = await self.sonarr.wanted(page_size=self._max_items) self._total = self._results.total async def async_update_entry_options(self, options: dict) -> None: """Update sensor settings when config entry options are update.""" self._max_items = options[CONF_WANTED_MAX_ITEMS] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the entity.""" attrs = {} if self._results is not None: for episode in self._results.episodes: name = f"{episode.series.title} {episode.identifier}" attrs[name] = episode.airdate return attrs @property def state(self) -> Optional[int]: """Return the state of the 
sensor.""" return self._total
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/sonarr/sensor.py
"""Config flow to configure Coolmaster.""" from pycoolmasternet_async import CoolMasterNet import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_HOST, CONF_PORT # pylint: disable=unused-import from .const import AVAILABLE_MODES, CONF_SUPPORTED_MODES, DEFAULT_PORT, DOMAIN MODES_SCHEMA = {vol.Required(mode, default=True): bool for mode in AVAILABLE_MODES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str, **MODES_SCHEMA}) async def _validate_connection(hass: core.HomeAssistant, host): cool = CoolMasterNet(host, DEFAULT_PORT) units = await cool.status() return bool(units) class CoolmasterConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Coolmaster config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @core.callback def _async_get_entry(self, data): supported_modes = [ key for (key, value) in data.items() if key in AVAILABLE_MODES and value ] return self.async_create_entry( title=data[CONF_HOST], data={ CONF_HOST: data[CONF_HOST], CONF_PORT: DEFAULT_PORT, CONF_SUPPORTED_MODES: supported_modes, }, ) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if user_input is None: return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA) errors = {} host = user_input[CONF_HOST] try: result = await _validate_connection(self.hass, host) if not result: errors["base"] = "no_units" except (OSError, ConnectionRefusedError, TimeoutError): errors["base"] = "cannot_connect" if errors: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) return self._async_get_entry(user_input)
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/coolmaster/config_flow.py
"""Reproduce an Remote state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATE_ON, STATE_OFF} async def _async_reproduce_state( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in VALID_STATES: _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # Return if we are already at the right state. if cur_state.state == state.state: return service_data = {ATTR_ENTITY_ID: state.entity_id} if state.state == STATE_ON: service = SERVICE_TURN_ON elif state.state == STATE_OFF: service = SERVICE_TURN_OFF await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce Remote states.""" await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/remote/reproduce_state.py
"""Support for powering relays in a DoorBird video doorbell.""" import datetime from homeassistant.components.switch import SwitchEntity import homeassistant.util.dt as dt_util from .const import DOMAIN, DOOR_STATION, DOOR_STATION_INFO from .entity import DoorBirdEntity IR_RELAY = "__ir_light__" async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the DoorBird switch platform.""" entities = [] config_entry_id = config_entry.entry_id doorstation = hass.data[DOMAIN][config_entry_id][DOOR_STATION] doorstation_info = hass.data[DOMAIN][config_entry_id][DOOR_STATION_INFO] relays = doorstation_info["RELAYS"] relays.append(IR_RELAY) for relay in relays: switch = DoorBirdSwitch(doorstation, doorstation_info, relay) entities.append(switch) async_add_entities(entities) class DoorBirdSwitch(DoorBirdEntity, SwitchEntity): """A relay in a DoorBird device.""" def __init__(self, doorstation, doorstation_info, relay): """Initialize a relay in a DoorBird device.""" super().__init__(doorstation, doorstation_info) self._doorstation = doorstation self._relay = relay self._state = False self._assume_off = datetime.datetime.min if relay == IR_RELAY: self._time = datetime.timedelta(minutes=5) else: self._time = datetime.timedelta(seconds=5) self._unique_id = f"{self._mac_addr}_{self._relay}" @property def unique_id(self): """Switch unique id.""" return self._unique_id @property def name(self): """Return the name of the switch.""" if self._relay == IR_RELAY: return f"{self._doorstation.name} IR" return f"{self._doorstation.name} Relay {self._relay}" @property def icon(self): """Return the icon to display.""" return "mdi:lightbulb" if self._relay == IR_RELAY else "mdi:dip-switch" @property def is_on(self): """Get the assumed state of the relay.""" return self._state def turn_on(self, **kwargs): """Power the relay.""" if self._relay == IR_RELAY: self._state = self._doorstation.device.turn_light_on() else: self._state = 
self._doorstation.device.energize_relay(self._relay) now = dt_util.utcnow() self._assume_off = now + self._time def turn_off(self, **kwargs): """Turn off the relays is not needed. They are time-based.""" raise NotImplementedError("DoorBird relays cannot be manually turned off.") async def async_update(self): """Wait for the correct amount of assumed time to pass.""" if self._state and self._assume_off <= dt_util.utcnow(): self._state = False self._assume_off = datetime.datetime.min
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/doorbird/switch.py
"""Support for an Intergas heater via an InComfort/InTouch Lan2RF gateway.""" from typing import Any, Dict, Optional from homeassistant.components.binary_sensor import ( DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorEntity, ) from . import DOMAIN, IncomfortChild async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up an InComfort/InTouch binary_sensor device.""" if discovery_info is None: return client = hass.data[DOMAIN]["client"] heaters = hass.data[DOMAIN]["heaters"] async_add_entities([IncomfortFailed(client, h) for h in heaters]) class IncomfortFailed(IncomfortChild, BinarySensorEntity): """Representation of an InComfort Failed sensor.""" def __init__(self, client, heater) -> None: """Initialize the binary sensor.""" super().__init__() self._unique_id = f"{heater.serial_no}_failed" self.entity_id = f"{BINARY_SENSOR_DOMAIN}.{DOMAIN}_failed" self._name = "Boiler Fault" self._client = client self._heater = heater @property def is_on(self) -> bool: """Return the status of the sensor.""" return self._heater.status["is_failed"] @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the device state attributes.""" return {"fault_code": self._heater.status["fault_code"]}
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/incomfort/binary_sensor.py
"""Manufacturer specific channels module for Zigbee Home Automation.""" from homeassistant.core import callback from .. import registries from ..const import ( ATTR_ATTRIBUTE_ID, ATTR_ATTRIBUTE_NAME, ATTR_VALUE, REPORT_CONFIG_ASAP, REPORT_CONFIG_MAX_INT, REPORT_CONFIG_MIN_INT, SIGNAL_ATTR_UPDATED, UNKNOWN, ) from .base import ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(registries.SMARTTHINGS_HUMIDITY_CLUSTER) class SmartThingsHumidity(ZigbeeChannel): """Smart Things Humidity channel.""" REPORT_CONFIG = [ { "attr": "measured_value", "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), } ] @registries.CHANNEL_ONLY_CLUSTERS.register(0xFD00) @registries.ZIGBEE_CHANNEL_REGISTRY.register(0xFD00) class OsramButton(ZigbeeChannel): """Osram button channel.""" REPORT_CONFIG = [] @registries.CHANNEL_ONLY_CLUSTERS.register(registries.PHILLIPS_REMOTE_CLUSTER) @registries.ZIGBEE_CHANNEL_REGISTRY.register(registries.PHILLIPS_REMOTE_CLUSTER) class PhillipsRemote(ZigbeeChannel): """Phillips remote channel.""" REPORT_CONFIG = [] @registries.CHANNEL_ONLY_CLUSTERS.register(0xFCC0) @registries.ZIGBEE_CHANNEL_REGISTRY.register(0xFCC0) class OppleRemote(ZigbeeChannel): """Opple button channel.""" REPORT_CONFIG = [] @registries.ZIGBEE_CHANNEL_REGISTRY.register( registries.SMARTTHINGS_ACCELERATION_CLUSTER ) class SmartThingsAcceleration(ZigbeeChannel): """Smart Things Acceleration channel.""" REPORT_CONFIG = [ {"attr": "acceleration", "config": REPORT_CONFIG_ASAP}, {"attr": "x_axis", "config": REPORT_CONFIG_ASAP}, {"attr": "y_axis", "config": REPORT_CONFIG_ASAP}, {"attr": "z_axis", "config": REPORT_CONFIG_ASAP}, ] @callback def attribute_updated(self, attrid, value): """Handle attribute updates on this cluster.""" if attrid == self.value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, self._cluster.attributes.get(attrid, [UNKNOWN])[0], value, ) return self.zha_send_event( SIGNAL_ATTR_UPDATED, { ATTR_ATTRIBUTE_ID: attrid, 
ATTR_ATTRIBUTE_NAME: self._cluster.attributes.get(attrid, [UNKNOWN])[0], ATTR_VALUE: value, }, )
"""The tests for the Updater component.""" import pytest from homeassistant.components import updater from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import mock_component NEW_VERSION = "10000.0" MOCK_VERSION = "10.0" MOCK_DEV_VERSION = "10.0.dev0" MOCK_HUUID = "abcdefg" MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"} MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}} RELEASE_NOTES = "test release notes" @pytest.fixture(autouse=True) def mock_version(): """Mock current version.""" with patch("homeassistant.components.updater.current_version", MOCK_VERSION): yield @pytest.fixture(name="mock_get_newest_version") def mock_get_newest_version_fixture(): """Fixture to mock get_newest_version.""" with patch( "homeassistant.components.updater.get_newest_version", return_value=(NEW_VERSION, RELEASE_NOTES), ) as mock: yield mock @pytest.fixture(name="mock_get_uuid", autouse=True) def mock_get_uuid_fixture(): """Fixture to mock get_uuid.""" with patch("homeassistant.helpers.instance_id.async_get") as mock: yield mock async def test_new_version_shows_entity_true( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is true if new version is available.""" mock_get_uuid.return_value = MOCK_HUUID assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == NEW_VERSION ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES ) async def test_same_version_shows_entity_false( hass, mock_get_uuid, mock_get_newest_version ): """Test if sensor is false if no new version is available.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") 
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == MOCK_VERSION ) assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version): """Test we do not gather analytics when disable reporting is active.""" mock_get_uuid.return_value = MOCK_HUUID mock_get_newest_version.return_value = (MOCK_VERSION, "") assert await async_setup_component( hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}} ) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "off") await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG) call = mock_get_newest_version.mock_calls[0][1] assert call[0] is hass assert call[1] is None async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock): """Test we do not gather analytics when no huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception ): res = await updater.get_newest_version(hass, None, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock): """Test we gather analytics when huuid is passed in.""" aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ): res = await updater.get_newest_version(hass, MOCK_HUUID, False) assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"]) async def test_error_fetching_new_version_bad_json(hass, aioclient_mock): """Test we handle json error while fetching new version.""" 
aioclient_mock.post(updater.UPDATER_URL, text="not json") with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock): """Test we handle response error while fetching new version.""" aioclient_mock.post( updater.UPDATER_URL, json={ "version": "0.15" # 'release-notes' is missing }, ) with patch( "homeassistant.helpers.system_info.async_get_system_info", return_value={"fake": "bla"}, ), pytest.raises(UpdateFailed): await updater.get_newest_version(hass, MOCK_HUUID, False) async def test_new_version_shows_entity_after_hour_hassio( hass, mock_get_uuid, mock_get_newest_version ): """Test if binary sensor gets updated if new version is available / Hass.io.""" mock_get_uuid.return_value = MOCK_HUUID mock_component(hass, "hassio") hass.data["hassio_core_info"] = {"version_latest": "999.0"} assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}}) await hass.async_block_till_done() assert hass.states.is_state("binary_sensor.updater", "on") assert ( hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0" ) assert ( hass.states.get("binary_sensor.updater").attributes["release_notes"] == RELEASE_NOTES )
mezz64/home-assistant
tests/components/updater/test_init.py
homeassistant/components/zha/core/channels/manufacturerspecific.py
# -*- coding: utf-8 -*-
"""
    flask.cli
    ~~~~~~~~~

    A simple command line application to run flask apps.

    :copyright: (c) 2015 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

import os
import sys
from threading import Lock, Thread
from functools import update_wrapper

import click

from ._compat import iteritems, reraise


class NoAppException(click.UsageError):
    """Raised if an application cannot be found or loaded."""


def find_best_app(module):
    """Given a module instance this tries to find the best possible
    application in the module or raises an exception.
    """
    from . import Flask

    # Search for the most common names first.
    for attr_name in 'app', 'application':
        app = getattr(module, attr_name, None)
        if app is not None and isinstance(app, Flask):
            return app

    # Otherwise find the only object that is a Flask instance.
    matches = [v for k, v in iteritems(module.__dict__)
               if isinstance(v, Flask)]

    if len(matches) == 1:
        return matches[0]
    raise NoAppException('Failed to find application in module "%s". Are '
                         'you sure it contains a Flask application? Maybe '
                         'you wrapped it in a WSGI middleware or you are '
                         'using a factory function.' % module.__name__)


def prepare_exec_for_file(filename):
    """Given a filename this will try to calculate the python path, add it
    to the search path and return the actual module name that is expected.
    """
    module = []

    # Chop off file extensions or package markers.  The ``__init__.py``
    # check must come first: such a filename also ends with ``.py``, so
    # testing the plain ``.py`` suffix first would make the package branch
    # unreachable and yield a bogus module path ending in ``__init__``.
    if os.path.split(filename)[1] == '__init__.py':
        filename = os.path.dirname(filename)
    elif filename.endswith('.py'):
        filename = filename[:-3]
    else:
        raise NoAppException('The file provided (%s) does exist but is not a '
                             'valid Python file. This means that it cannot '
                             'be used as application. Please change the '
                             'extension to .py' % filename)
    filename = os.path.realpath(filename)

    # Walk upwards as long as we stay inside a package so the returned
    # module name is fully dotted, and put the containing directory on
    # sys.path so the import below can find it.
    dirpath = filename
    while 1:
        dirpath, extra = os.path.split(dirpath)
        module.append(extra)
        if not os.path.isfile(os.path.join(dirpath, '__init__.py')):
            break

    sys.path.insert(0, dirpath)
    return '.'.join(module[::-1])


def locate_app(app_id):
    """Attempts to locate the application."""
    __traceback_hide__ = True
    # ``module:object`` selects a specific attribute; a bare module name
    # falls back to find_best_app().
    if ':' in app_id:
        module, app_obj = app_id.split(':', 1)
    else:
        module = app_id
        app_obj = None
    __import__(module)
    mod = sys.modules[module]
    if app_obj is None:
        app = find_best_app(mod)
    else:
        app = getattr(mod, app_obj, None)
        if app is None:
            raise RuntimeError('Failed to find application in module "%s"'
                               % module)

    return app


class DispatchingApp(object):
    """Special application that dispatches to a flask application which
    is imported by name in a background thread.  If an error happens
    it is recorded and shows as part of the WSGI handling which in case
    of the Werkzeug debugger means that it shows up in the browser.
    """

    def __init__(self, loader, use_eager_loading=False):
        self.loader = loader
        self._app = None
        self._lock = Lock()
        # Holds sys.exc_info() from a failed background load until the next
        # request, where it is re-raised so the debugger can display it.
        self._bg_loading_exc_info = None
        if use_eager_loading:
            self._load_unlocked()
        else:
            self._load_in_background()

    def _load_in_background(self):
        def _load_app():
            __traceback_hide__ = True
            with self._lock:
                try:
                    self._load_unlocked()
                except Exception:
                    self._bg_loading_exc_info = sys.exc_info()
        t = Thread(target=_load_app, args=())
        t.start()

    def _flush_bg_loading_exception(self):
        __traceback_hide__ = True
        exc_info = self._bg_loading_exc_info
        if exc_info is not None:
            self._bg_loading_exc_info = None
            reraise(*exc_info)

    def _load_unlocked(self):
        __traceback_hide__ = True
        self._app = rv = self.loader()
        self._bg_loading_exc_info = None
        return rv

    def __call__(self, environ, start_response):
        __traceback_hide__ = True
        if self._app is not None:
            return self._app(environ, start_response)
        self._flush_bg_loading_exception()
        with self._lock:
            if self._app is not None:
                rv = self._app
            else:
                rv = self._load_unlocked()
        return rv(environ, start_response)


class ScriptInfo(object):
    """Help object to deal with Flask applications.  This is usually not
    necessary to interface with as it's used internally in the dispatching
    to click.
    """

    def __init__(self, app_import_path=None, debug=None, create_app=None):
        #: The application import path
        self.app_import_path = app_import_path
        #: The debug flag.  If this is not None, the application will
        #: automatically have its debug flag overridden with this value.
        self.debug = debug
        #: Optionally a function that is passed the script info to create
        #: the instance of the application.
        self.create_app = create_app
        #: A dictionary with arbitrary data that can be associated with
        #: this script info.
        self.data = {}
        self._loaded_app = None

    def load_app(self):
        """Loads the Flask app (if not yet loaded) and returns it.  Calling
        this multiple times will just result in the already loaded app to
        be returned.
        """
        __traceback_hide__ = True
        if self._loaded_app is not None:
            return self._loaded_app
        if self.create_app is not None:
            rv = self.create_app(self)
        else:
            if self.app_import_path is None:
                raise NoAppException('Could not locate Flask application. '
                                     'You did not provide FLASK_APP or the '
                                     '--app parameter.')
            rv = locate_app(self.app_import_path)
        if self.debug is not None:
            rv.debug = self.debug
        self._loaded_app = rv
        return rv


pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)


def with_appcontext(f):
    """Wraps a callback so that it's guaranteed to be executed with the
    script's application context.  If callbacks are registered directly
    to the ``app.cli`` object then they are wrapped with this function
    by default unless it's disabled.
    """
    @click.pass_context
    def decorator(__ctx, *args, **kwargs):
        with __ctx.ensure_object(ScriptInfo).load_app().app_context():
            return __ctx.invoke(f, *args, **kwargs)
    return update_wrapper(decorator, f)


def set_debug_value(ctx, param, value):
    # Option callback: stash --debug/--no-debug onto the shared ScriptInfo.
    ctx.ensure_object(ScriptInfo).debug = value


def set_app_value(ctx, param, value):
    # Option callback: resolve -a/--app.  A filesystem path is converted to
    # an importable module name; otherwise make '.' importable for bare
    # module names.
    if value is not None:
        if os.path.isfile(value):
            value = prepare_exec_for_file(value)
        elif '.' not in sys.path:
            sys.path.insert(0, '.')
    ctx.ensure_object(ScriptInfo).app_import_path = value


debug_option = click.Option(['--debug/--no-debug'],
                            help='Enable or disable debug mode.',
                            default=None, callback=set_debug_value)

app_option = click.Option(['-a', '--app'],
                          help='The application to run',
                          callback=set_app_value, is_eager=True)


class AppGroup(click.Group):
    """This works similar to a regular click :class:`~click.Group` but it
    changes the behavior of the :meth:`command` decorator so that it
    automatically wraps the functions in :func:`with_appcontext`.

    Not to be confused with :class:`FlaskGroup`.
    """

    def command(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
        unless it's disabled by passing ``with_appcontext=False``.
        """
        wrap_for_ctx = kwargs.pop('with_appcontext', True)
        def decorator(f):
            if wrap_for_ctx:
                f = with_appcontext(f)
            return click.Group.command(self, *args, **kwargs)(f)
        return decorator

    def group(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it defaults the group class to
        :class:`AppGroup`.
        """
        kwargs.setdefault('cls', AppGroup)
        return click.Group.group(self, *args, **kwargs)


class FlaskGroup(AppGroup):
    """Special subclass of the :class:`AppGroup` group that supports
    loading more commands from the configured Flask app.  Normally a
    developer does not have to interface with this class but there are
    some very advanced use cases for which it makes sense to create an
    instance of this.

    For information as of why this is useful see :ref:`custom-scripts`.

    :param add_default_commands: if this is True then the default run and
                                 shell commands will be added.
    :param add_app_option: adds the default :option:`--app` option.  This gets
                           automatically disabled if a `create_app`
                           callback is defined.
    :param add_debug_option: adds the default :option:`--debug` option.
    :param create_app: an optional callback that is passed the script info
                       and returns the loaded app.
    """

    def __init__(self, add_default_commands=True, add_app_option=None,
                 add_debug_option=True, create_app=None, **extra):
        params = list(extra.pop('params', None) or ())
        if add_app_option is None:
            add_app_option = create_app is None
        if add_app_option:
            params.append(app_option)
        if add_debug_option:
            params.append(debug_option)

        AppGroup.__init__(self, params=params, **extra)
        self.create_app = create_app

        if add_default_commands:
            self.add_command(run_command)
            self.add_command(shell_command)

    def get_command(self, ctx, name):
        # We load built-in commands first as these should always be the
        # same no matter what the app does.  If the app does want to
        # override this it needs to make a custom instance of this group
        # and not attach the default commands.
        #
        # This also means that the script stays functional in case the
        # application completely fails.
        rv = AppGroup.get_command(self, ctx, name)
        if rv is not None:
            return rv

        info = ctx.ensure_object(ScriptInfo)
        try:
            rv = info.load_app().cli.get_command(ctx, name)
            if rv is not None:
                return rv
        except NoAppException:
            pass

    def list_commands(self, ctx):
        # The commands available is the list of both the application (if
        # available) plus the builtin commands.
        rv = set(click.Group.list_commands(self, ctx))
        info = ctx.ensure_object(ScriptInfo)
        try:
            rv.update(info.load_app().cli.list_commands(ctx))
        except Exception:
            # Here we intentionally swallow all exceptions as we don't
            # want the help page to break if the app does not exist.
            # If someone attempts to use the command we try to create
            # the app again and this will give us the error.
            pass
        return sorted(rv)

    def main(self, *args, **kwargs):
        obj = kwargs.get('obj')
        if obj is None:
            obj = ScriptInfo(create_app=self.create_app)
        kwargs['obj'] = obj
        kwargs.setdefault('auto_envvar_prefix', 'FLASK')
        return AppGroup.main(self, *args, **kwargs)


def script_info_option(*args, **kwargs):
    """This decorator works exactly like :func:`click.option` but is eager
    by default and stores the value in the :attr:`ScriptInfo.data`.  This
    is useful to further customize an application factory in very complex
    situations.

    :param script_info_key: this is a mandatory keyword argument which
                            defines under which data key the value should
                            be stored.
    """
    try:
        key = kwargs.pop('script_info_key')
    except LookupError:
        raise TypeError('script_info_key not provided.')

    real_callback = kwargs.get('callback')
    def callback(ctx, param, value):
        if real_callback is not None:
            value = real_callback(ctx, value)
        ctx.ensure_object(ScriptInfo).data[key] = value
        return value

    kwargs['callback'] = callback
    kwargs.setdefault('is_eager', True)
    return click.option(*args, **kwargs)


@click.command('run', short_help='Runs a development server.')
@click.option('--host', '-h', default='127.0.0.1',
              help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
              help='The port to bind to.')
@click.option('--reload/--no-reload', default=None,
              help='Enable or disable the reloader.  By default the reloader '
              'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
              help='Enable or disable the debugger.  By default the debugger '
              'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
              help='Enable or disable eager loading.  By default eager '
              'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=False,
              help='Enable or disable multithreading.')
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
                with_threads):
    """Runs a local development server for the Flask application.

    This local server is recommended for development purposes only but it
    can also be used for simple intranet deployments.  By default it will
    not support any sort of concurrency at all to simplify debugging.  This
    can be changed with the --with-threads option which will enable basic
    multithreading.

    The reloader and debugger are by default enabled if the debug flag of
    Flask is enabled and disabled otherwise.
    """
    from werkzeug.serving import run_simple
    if reload is None:
        reload = info.debug
    if debugger is None:
        debugger = info.debug
    if eager_loading is None:
        eager_loading = not reload

    app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)

    # Extra startup messages.  This depends a bit on Werkzeug internals to
    # not double execute when the reloader kicks in.
    if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        # If we have an import path we can print it out now which can help
        # people understand what's being served.  If we do not have an
        # import path because the app was loaded through a callback then
        # we won't print anything.
        if info.app_import_path is not None:
            print(' * Serving Flask app "%s"' % info.app_import_path)
        if info.debug is not None:
            print(' * Forcing debug %s' % (info.debug and 'on' or 'off'))

    run_simple(host, port, app, use_reloader=reload,
               use_debugger=debugger, threaded=with_threads)


@click.command('shell', short_help='Runs a shell in the app context.')
@with_appcontext
def shell_command():
    """Runs an interactive Python shell in the context of a given
    Flask application.  The application will populate the default
    namespace of this shell according to its configuration.

    This is useful for executing small snippets of management code
    without having to manually configure the application.
    """
    import code
    from flask.globals import _app_ctx_stack
    app = _app_ctx_stack.top.app
    banner = 'Python %s on %s\nApp: %s%s\nInstance: %s' % (
        sys.version,
        sys.platform,
        app.import_name,
        app.debug and ' [debug]' or '',
        app.instance_path,
    )
    ctx = {}

    # Support the regular Python interpreter startup script if someone
    # is using it.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup and os.path.isfile(startup):
        with open(startup, 'r') as f:
            eval(compile(f.read(), startup, 'exec'), ctx)

    ctx.update(app.make_shell_context())

    code.interact(banner=banner, local=ctx)


cli = FlaskGroup(help="""\
This shell command acts as general utility script for Flask applications.

It loads the application configured (either through the FLASK_APP environment
variable or the --app parameter) and then provides commands either provided
by the application or Flask itself.

The most useful commands are the "run" and "shell" command.

Example usage:

  flask --app=hello --debug run
""")


def main(as_module=False):
    this_module = __package__ + '.cli'
    args = sys.argv[1:]

    if as_module:
        if sys.version_info >= (2, 7):
            name = 'python -m ' + this_module.rsplit('.', 1)[0]
        else:
            name = 'python -m ' + this_module

        # This module is always executed as "python -m flask.run" and as such
        # we need to ensure that we restore the actual command line so that
        # the reloader can properly operate.
        sys.argv = ['-m', this_module] + sys.argv[1:]
    else:
        name = None

    cli.main(args=args, prog_name=name)


if __name__ == '__main__':
    main(as_module=True)
# -*- coding: utf-8 -*-
"""
    tests.blueprints
    ~~~~~~~~~~~~~~~~

    Blueprints (and currently modules)

    :copyright: (c) 2015 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

import pytest

import flask
from flask._compat import text_type
from werkzeug.http import parse_cache_control_header
from jinja2 import TemplateNotFound


def test_blueprint_specific_error_handling():
    # Blueprint-level error handlers win over the app-level one; a blueprint
    # with no handler of its own falls back to the application handler.
    frontend = flask.Blueprint('frontend', __name__)
    backend = flask.Blueprint('backend', __name__)
    sideend = flask.Blueprint('sideend', __name__)

    @frontend.errorhandler(403)
    def frontend_forbidden(e):
        return 'frontend says no', 403

    @frontend.route('/frontend-no')
    def frontend_no():
        flask.abort(403)

    @backend.errorhandler(403)
    def backend_forbidden(e):
        return 'backend says no', 403

    @backend.route('/backend-no')
    def backend_no():
        flask.abort(403)

    @sideend.route('/what-is-a-sideend')
    def sideend_no():
        flask.abort(403)

    app = flask.Flask(__name__)
    app.register_blueprint(frontend)
    app.register_blueprint(backend)
    app.register_blueprint(sideend)

    @app.errorhandler(403)
    def app_forbidden(e):
        return 'application itself says no', 403

    c = app.test_client()

    assert c.get('/frontend-no').data == b'frontend says no'
    assert c.get('/backend-no').data == b'backend says no'
    assert c.get('/what-is-a-sideend').data == b'application itself says no'


def test_blueprint_specific_user_error_handling():
    # Custom exception classes can be registered both via the decorator and
    # via register_error_handler; both routes must reach their handler.
    class MyDecoratorException(Exception):
        pass

    class MyFunctionException(Exception):
        pass

    blue = flask.Blueprint('blue', __name__)

    @blue.errorhandler(MyDecoratorException)
    def my_decorator_exception_handler(e):
        assert isinstance(e, MyDecoratorException)
        return 'boom'

    def my_function_exception_handler(e):
        assert isinstance(e, MyFunctionException)
        return 'bam'
    blue.register_error_handler(MyFunctionException,
                                my_function_exception_handler)

    @blue.route('/decorator')
    def blue_deco_test():
        raise MyDecoratorException()

    @blue.route('/function')
    def blue_func_test():
        raise MyFunctionException()

    app = flask.Flask(__name__)
    app.register_blueprint(blue)

    c = app.test_client()

    assert c.get('/decorator').data == b'boom'
    assert c.get('/function').data == b'bam'


def test_blueprint_url_definitions():
    bp = flask.Blueprint('test', __name__)

    @bp.route('/foo', defaults={'baz': 42})
    def foo(bar, baz):
        return '%s/%d' % (bar, baz)

    @bp.route('/bar')
    def bar(bar):
        return text_type(bar)

    app = flask.Flask(__name__)
    # Same blueprint registered twice with different url_defaults: each
    # registration keeps its own default for 'bar'.
    app.register_blueprint(bp, url_prefix='/1', url_defaults={'bar': 23})
    app.register_blueprint(bp, url_prefix='/2', url_defaults={'bar': 19})

    c = app.test_client()
    assert c.get('/1/foo').data == b'23/42'
    assert c.get('/2/foo').data == b'19/42'
    assert c.get('/1/bar').data == b'23'
    assert c.get('/2/bar').data == b'19'


def test_blueprint_url_processors():
    bp = flask.Blueprint('frontend', __name__, url_prefix='/<lang_code>')

    @bp.url_defaults
    def add_language_code(endpoint, values):
        values.setdefault('lang_code', flask.g.lang_code)

    @bp.url_value_preprocessor
    def pull_lang_code(endpoint, values):
        flask.g.lang_code = values.pop('lang_code')

    @bp.route('/')
    def index():
        return flask.url_for('.about')

    @bp.route('/about')
    def about():
        return flask.url_for('.index')

    app = flask.Flask(__name__)
    app.register_blueprint(bp)

    c = app.test_client()

    assert c.get('/de/').data == b'/de/about'
    assert c.get('/de/about').data == b'/de/'


def test_templates_and_static(test_apps):
    from blueprintapp import app
    c = app.test_client()

    rv = c.get('/')
    assert rv.data == b'Hello from the Frontend'
    rv = c.get('/admin/')
    assert rv.data == b'Hello from the Admin'
    rv = c.get('/admin/index2')
    assert rv.data == b'Hello from the Admin'
    rv = c.get('/admin/static/test.txt')
    assert rv.data.strip() == b'Admin File'
    rv.close()
    rv = c.get('/admin/static/css/test.css')
    assert rv.data.strip() == b'/* nested file */'
    rv.close()

    # try/finally, in case other tests use this app for Blueprint tests.
    max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
    try:
        expected_max_age = 3600
        if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == expected_max_age:
            expected_max_age = 7200
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = expected_max_age
        rv = c.get('/admin/static/css/test.css')
        cc = parse_cache_control_header(rv.headers['Cache-Control'])
        assert cc.max_age == expected_max_age
        rv.close()
    finally:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default

    with app.test_request_context():
        assert flask.url_for('admin.static', filename='test.txt') == \
            '/admin/static/test.txt'

    with app.test_request_context():
        try:
            flask.render_template('missing.html')
        except TemplateNotFound as e:
            assert e.name == 'missing.html'
        else:
            assert 0, 'expected exception'

    with flask.Flask(__name__).test_request_context():
        assert flask.render_template('nested/nested.txt') == 'I\'m nested'


def test_default_static_cache_timeout():
    app = flask.Flask(__name__)

    class MyBlueprint(flask.Blueprint):
        def get_send_file_max_age(self, filename):
            return 100

    blueprint = MyBlueprint('blueprint', __name__, static_folder='static')
    app.register_blueprint(blueprint)

    # try/finally, in case other tests use this app for Blueprint tests.
    max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
    try:
        with app.test_request_context():
            # Pick a value that is guaranteed to differ from the app default
            # so the blueprint override (100) is observable.
            unexpected_max_age = 3600
            if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == unexpected_max_age:
                unexpected_max_age = 7200
            app.config['SEND_FILE_MAX_AGE_DEFAULT'] = unexpected_max_age
            rv = blueprint.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            assert cc.max_age == 100
            rv.close()
    finally:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default


def test_templates_list(test_apps):
    from blueprintapp import app
    templates = sorted(app.jinja_env.list_templates())
    assert templates == ['admin/index.html', 'frontend/index.html']


def test_dotted_names():
    frontend = flask.Blueprint('myapp.frontend', __name__)
    backend = flask.Blueprint('myapp.backend', __name__)

    @frontend.route('/fe')
    def frontend_index():
        return flask.url_for('myapp.backend.backend_index')

    @frontend.route('/fe2')
    def frontend_page2():
        return flask.url_for('.frontend_index')

    @backend.route('/be')
    def backend_index():
        return flask.url_for('myapp.frontend.frontend_index')

    app = flask.Flask(__name__)
    app.register_blueprint(frontend)
    app.register_blueprint(backend)

    c = app.test_client()
    assert c.get('/fe').data.strip() == b'/be'
    assert c.get('/fe2').data.strip() == b'/fe'
    assert c.get('/be').data.strip() == b'/fe'


def test_dotted_names_from_app():
    app = flask.Flask(__name__)
    app.testing = True
    test = flask.Blueprint('test', __name__)

    @app.route('/')
    def app_index():
        return flask.url_for('test.index')

    @test.route('/test/')
    def index():
        return flask.url_for('app_index')

    app.register_blueprint(test)

    with app.test_client() as c:
        rv = c.get('/')
        assert rv.data == b'/test/'


def test_empty_url_defaults():
    bp = flask.Blueprint('bp', __name__)

    @bp.route('/', defaults={'page': 1})
    @bp.route('/page/<int:page>')
    def something(page):
        return str(page)

    app = flask.Flask(__name__)
    app.register_blueprint(bp)

    c = app.test_client()
    assert c.get('/').data == b'1'
    assert c.get('/page/2').data == b'2'


def test_route_decorator_custom_endpoint():
    bp = flask.Blueprint('bp', __name__)

    @bp.route('/foo')
    def foo():
        return flask.request.endpoint

    @bp.route('/bar', endpoint='bar')
    def foo_bar():
        return flask.request.endpoint

    @bp.route('/bar/123', endpoint='123')
    def foo_bar_foo():
        return flask.request.endpoint

    @bp.route('/bar/foo')
    def bar_foo():
        return flask.request.endpoint

    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.request.endpoint

    c = app.test_client()
    assert c.get('/').data == b'index'
    assert c.get('/py/foo').data == b'bp.foo'
    assert c.get('/py/bar').data == b'bp.bar'
    assert c.get('/py/bar/123').data == b'bp.123'
    assert c.get('/py/bar/foo').data == b'bp.bar_foo'


def test_route_decorator_custom_endpoint_with_dots():
    # Dotted endpoint names are reserved for blueprint namespacing, so every
    # attempt to register one must raise an AssertionError.
    bp = flask.Blueprint('bp', __name__)

    @bp.route('/foo')
    def foo():
        return flask.request.endpoint

    try:
        @bp.route('/bar', endpoint='bar.bar')
        def foo_bar():
            return flask.request.endpoint
    except AssertionError:
        pass
    else:
        raise AssertionError('expected AssertionError not raised')

    try:
        @bp.route('/bar/123', endpoint='bar.123')
        def foo_bar_foo():
            return flask.request.endpoint
    except AssertionError:
        pass
    else:
        raise AssertionError('expected AssertionError not raised')

    def foo_foo_foo():
        pass

    pytest.raises(
        AssertionError,
        lambda: bp.add_url_rule(
            '/bar/123', endpoint='bar.123', view_func=foo_foo_foo
        )
    )

    pytest.raises(
        AssertionError,
        bp.route('/bar/123', endpoint='bar.123'),
        lambda: None
    )

    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    c = app.test_client()
    assert c.get('/py/foo').data == b'bp.foo'
    # The rules above didn't actually make it through: the dotted endpoints
    # were rejected, so the URLs must 404.
    rv = c.get('/py/bar')
    assert rv.status_code == 404
    rv = c.get('/py/bar/123')
    assert rv.status_code == 404


def test_template_filter():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_filter()
    def my_reverse(s):
        return s[::-1]
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'my_reverse' in app.jinja_env.filters.keys()
    assert app.jinja_env.filters['my_reverse'] == my_reverse
    assert app.jinja_env.filters['my_reverse']('abcd') == 'dcba'


def test_add_template_filter():
    bp = flask.Blueprint('bp', __name__)

    def my_reverse(s):
        return s[::-1]
    bp.add_app_template_filter(my_reverse)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'my_reverse' in app.jinja_env.filters.keys()
    assert app.jinja_env.filters['my_reverse'] == my_reverse
    assert app.jinja_env.filters['my_reverse']('abcd') == 'dcba'


def test_template_filter_with_name():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_filter('strrev')
    def my_reverse(s):
        return s[::-1]
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'strrev' in app.jinja_env.filters.keys()
    assert app.jinja_env.filters['strrev'] == my_reverse
    assert app.jinja_env.filters['strrev']('abcd') == 'dcba'


def test_add_template_filter_with_name():
    bp = flask.Blueprint('bp', __name__)

    def my_reverse(s):
        return s[::-1]
    bp.add_app_template_filter(my_reverse, 'strrev')
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'strrev' in app.jinja_env.filters.keys()
    assert app.jinja_env.filters['strrev'] == my_reverse
    assert app.jinja_env.filters['strrev']('abcd') == 'dcba'


def test_template_filter_with_template():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_filter()
    def super_reverse(s):
        return s[::-1]
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_filter.html', value='abcd')
    rv = app.test_client().get('/')
    assert rv.data == b'dcba'


def test_template_filter_after_route_with_template():
    # Registering the blueprint after the route exists must still make the
    # filter available to the template.
    app = flask.Flask(__name__)

    @app.route('/')
    def index():
        return flask.render_template('template_filter.html', value='abcd')
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_filter()
    def super_reverse(s):
        return s[::-1]
    app.register_blueprint(bp, url_prefix='/py')
    rv = app.test_client().get('/')
    assert rv.data == b'dcba'


def test_add_template_filter_with_template():
    bp = flask.Blueprint('bp', __name__)

    def super_reverse(s):
        return s[::-1]
    bp.add_app_template_filter(super_reverse)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_filter.html', value='abcd')
    rv = app.test_client().get('/')
    assert rv.data == b'dcba'


def test_template_filter_with_name_and_template():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_filter('super_reverse')
    def my_reverse(s):
        return s[::-1]
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_filter.html', value='abcd')
    rv = app.test_client().get('/')
    assert rv.data == b'dcba'


def test_add_template_filter_with_name_and_template():
    bp = flask.Blueprint('bp', __name__)

    def my_reverse(s):
        return s[::-1]
    bp.add_app_template_filter(my_reverse, 'super_reverse')
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_filter.html', value='abcd')
    rv = app.test_client().get('/')
    assert rv.data == b'dcba'


def test_template_test():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_test()
    def is_boolean(value):
        return isinstance(value, bool)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'is_boolean' in app.jinja_env.tests.keys()
    assert app.jinja_env.tests['is_boolean'] == is_boolean
    assert app.jinja_env.tests['is_boolean'](False)


def test_add_template_test():
    bp = flask.Blueprint('bp', __name__)

    def is_boolean(value):
        return isinstance(value, bool)
    bp.add_app_template_test(is_boolean)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'is_boolean' in app.jinja_env.tests.keys()
    assert app.jinja_env.tests['is_boolean'] == is_boolean
    assert app.jinja_env.tests['is_boolean'](False)


def test_template_test_with_name():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_test('boolean')
    def is_boolean(value):
        return isinstance(value, bool)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'boolean' in app.jinja_env.tests.keys()
    assert app.jinja_env.tests['boolean'] == is_boolean
    assert app.jinja_env.tests['boolean'](False)


def test_add_template_test_with_name():
    bp = flask.Blueprint('bp', __name__)

    def is_boolean(value):
        return isinstance(value, bool)
    bp.add_app_template_test(is_boolean, 'boolean')
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')
    assert 'boolean' in app.jinja_env.tests.keys()
    assert app.jinja_env.tests['boolean'] == is_boolean
    assert app.jinja_env.tests['boolean'](False)


def test_template_test_with_template():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_test()
    def boolean(value):
        return isinstance(value, bool)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_test.html', value=False)
    rv = app.test_client().get('/')
    assert b'Success!' in rv.data


def test_template_test_after_route_with_template():
    app = flask.Flask(__name__)

    @app.route('/')
    def index():
        return flask.render_template('template_test.html', value=False)
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_test()
    def boolean(value):
        return isinstance(value, bool)
    app.register_blueprint(bp, url_prefix='/py')
    rv = app.test_client().get('/')
    assert b'Success!' in rv.data


def test_add_template_test_with_template():
    bp = flask.Blueprint('bp', __name__)

    def boolean(value):
        return isinstance(value, bool)
    bp.add_app_template_test(boolean)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_test.html', value=False)
    rv = app.test_client().get('/')
    assert b'Success!' in rv.data


def test_template_test_with_name_and_template():
    bp = flask.Blueprint('bp', __name__)

    @bp.app_template_test('boolean')
    def is_boolean(value):
        return isinstance(value, bool)
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_test.html', value=False)
    rv = app.test_client().get('/')
    assert b'Success!' in rv.data


def test_add_template_test_with_name_and_template():
    bp = flask.Blueprint('bp', __name__)

    def is_boolean(value):
        return isinstance(value, bool)
    bp.add_app_template_test(is_boolean, 'boolean')
    app = flask.Flask(__name__)
    app.register_blueprint(bp, url_prefix='/py')

    @app.route('/')
    def index():
        return flask.render_template('template_test.html', value=False)
    rv = app.test_client().get('/')
    assert b'Success!' in rv.data
GavinCruise/flask
tests/test_blueprints.py
flask/cli.py
from django.utils import timezone

from website.mails import mails
from website.reviews import signals as reviews_signals
from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL

from osf.utils.workflows import RegistrationModerationTriggers


def get_email_template_context(resource):
    """Build the base mail-template context shared by all review notifications.

    ``resource`` is a reviewable preprint or registration; the provider type
    decides the URL segment and the "document type" wording used in emails.
    """
    is_preprint = resource.provider.type == 'osf.preprintprovider'
    url_segment = 'preprints' if is_preprint else 'registries'
    document_type = resource.provider.preprint_word if is_preprint else 'registration'

    base_context = {
        'domain': DOMAIN,
        'reviewable': resource,
        'workflow': resource.provider.reviews_workflow,
        'provider_url': resource.provider.domain or f'{DOMAIN}{url_segment}/{resource.provider._id}',
        'provider_contact_email': resource.provider.email_contact or OSF_CONTACT_EMAIL,
        'provider_support_email': resource.provider.email_support or OSF_SUPPORT_EMAIL,
        'document_type': document_type,
    }

    # Registrations get extra context; the two former separate checks on
    # document_type are merged into one branch.
    if document_type == 'registration':
        base_context['draft_registration'] = resource.draft_registration.get()
        brand = resource.provider.brand
        if brand:
            base_context['logo_url'] = brand.hero_logo_image
            base_context['top_bar_color'] = brand.primary_color
            base_context['provider_name'] = resource.provider.name

    return base_context


def notify_submit(resource, user, *args, **kwargs):
    """Notify contributors and moderators that a resource was submitted."""
    context = get_email_template_context(resource)
    context['referrer'] = user
    recipients = list(resource.contributors)
    reviews_signals.reviews_email_submit.send(
        context=context,
        recipients=recipients
    )
    reviews_signals.reviews_email_submit_moderators_notifications.send(
        timestamp=timezone.now(),
        context=context
    )


def notify_resubmit(resource, user, action, *args, **kwargs):
    """Send the resubmission confirmation email to the resubmitting user."""
    context = get_email_template_context(resource)
    reviews_signals.reviews_email.send(
        creator=user,
        context=context,
        template='reviews_resubmission_confirmation',
        action=action
    )


def notify_accept_reject(resource, user, action, states, *args, **kwargs):
    """Notify about a moderation accept/reject decision."""
    context = get_email_template_context(resource)
    # Comments are only surfaced when the provider allows public review comments.
    context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment
    context['comment'] = action.comment
    context['requester'] = action.creator
    context['is_rejected'] = action.to_state == states.REJECTED.db_name
    context['was_pending'] = action.from_state == states.PENDING.db_name
    reviews_signals.reviews_email.send(
        creator=user,
        context=context,
        template='reviews_submission_status',
        action=action
    )


def notify_edit_comment(resource, user, action, *args, **kwargs):
    """Notify about an edited moderation comment (only if comments are public)."""
    if not resource.provider.reviews_comments_private and action.comment:
        context = get_email_template_context(resource)
        context['comment'] = action.comment
        reviews_signals.reviews_email.send(
            creator=user,
            context=context,
            template='reviews_update_comment',
            action=action
        )


def notify_reject_withdraw_request(resource, action, *args, **kwargs):
    """Email every contributor that a withdrawal request was declined."""
    context = get_email_template_context(resource)
    context['requester'] = action.creator

    for contributor in resource.contributors.all():
        context['contributor'] = contributor
        # NOTE: the redundant re-assignment of context['requester'] inside the
        # loop was removed; it is invariant and already set above.
        context['is_requester'] = action.creator == contributor
        mails.send_mail(
            contributor.username,
            mails.WITHDRAWAL_REQUEST_DECLINED,
            **context
        )


def notify_moderator_registration_requests_withdrawal(resource, user, *args, **kwargs):
    """Notify moderators that a registration withdrawal was requested."""
    context = get_email_template_context(resource)
    context['referrer'] = user
    reviews_signals.reviews_withdraw_requests_notification_moderators.send(
        timestamp=timezone.now(),
        context=context
    )


def notify_withdraw_registration(resource, action, *args, **kwargs):
    """Email every contributor that a registration withdrawal was granted."""
    context = get_email_template_context(resource)
    context['force_withdrawal'] = action.trigger == RegistrationModerationTriggers.FORCE_WITHDRAW.db_name
    context['requester'] = resource.retraction.initiated_by
    context['comment'] = action.comment
    context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment

    for contributor in resource.contributors.all():
        context['contributor'] = contributor
        context['is_requester'] = context['requester'] == contributor
        mails.send_mail(
            contributor.username,
            mails.WITHDRAWAL_REQUEST_GRANTED,
            **context
        )
import mock
import pytest

from osf.utils import workflows
from api.base.settings.defaults import API_BASE
from api_tests.requests.mixins import NodeRequestTestMixin, PreprintRequestTestMixin
from osf_tests.factories import NodeFactory, NodeRequestFactory


@pytest.mark.django_db
class TestNodeRequestListCreate(NodeRequestTestMixin):
    """List/create behavior of the node access-request endpoint."""

    @pytest.fixture()
    def url(self, project):
        return '/{}nodes/{}/requests/'.format(API_BASE, project._id)

    @pytest.fixture()
    def create_payload(self):
        return {
            'data': {
                'attributes': {
                    'comment': 'ASDFG',
                    'request_type': 'access'
                },
                'type': 'node-requests'
            }
        }

    def test_noncontrib_can_submit_to_public_node(self, app, project, noncontrib, url, create_payload):
        project.is_public = True
        project.save()
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth)
        assert res.status_code == 201

    def test_noncontrib_can_submit_to_private_node(self, app, project, noncontrib, url, create_payload):
        assert not project.is_public
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth)
        assert res.status_code == 201

    def test_must_be_logged_in_to_create(self, app, url, create_payload):
        res = app.post_json_api(url, create_payload, expect_errors=True)
        assert res.status_code == 401

    def test_contributor_cannot_submit_to_contributed_node(self, app, url, write_contrib, create_payload):
        res = app.post_json_api(url, create_payload, auth=write_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == 'You cannot request access to a node you contribute to.'

    def test_admin_can_view_requests(self, app, url, admin, node_request):
        res = app.get(url, auth=admin.auth)
        assert res.status_code == 200
        assert res.json['data'][0]['id'] == node_request._id

    def test_write_contrib_cannot_view_requests(self, app, url, write_contrib, node_request):
        res = app.get(url, auth=write_contrib.auth, expect_errors=True)
        assert res.status_code == 403

    def test_requester_cannot_view_requests(self, app, url, requester, node_request):
        res = app.get(url, auth=requester.auth, expect_errors=True)
        assert res.status_code == 403

    def test_noncontrib_cannot_view_requests(self, app, url, noncontrib, node_request):
        res = app.get(url, auth=noncontrib.auth, expect_errors=True)
        assert res.status_code == 403

    def test_requester_cannot_submit_again(self, app, url, requester, node_request, create_payload):
        res = app.post_json_api(url, create_payload, auth=requester.auth, expect_errors=True)
        assert res.status_code == 409
        assert res.json['errors'][0]['detail'] == 'Users may not have more than one access request per node.'

    def test_requests_disabled_create(self, app, url, create_payload, project, noncontrib):
        project.access_requests_enabled = False
        project.save()
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth, expect_errors=True)
        assert res.status_code == 403

    def test_requests_disabled_list(self, app, url, create_payload, project, admin):
        project.access_requests_enabled = False
        project.save()
        res = app.get(url, create_payload, auth=admin.auth, expect_errors=True)
        assert res.status_code == 403

    @mock.patch('website.mails.mails.send_mail')
    def test_email_sent_to_all_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin):
        project.is_public = True
        project.save()
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth)
        assert res.status_code == 201
        assert mock_mail.call_count == 2

    @mock.patch('website.mails.mails.send_mail')
    def test_email_not_sent_to_parent_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin):
        component = NodeFactory(parent=project, creator=second_admin)
        component.is_public = True
        project.save()
        url = '/{}nodes/{}/requests/'.format(API_BASE, component._id)
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth)
        assert res.status_code == 201
        assert component.parent_admin_contributors.count() == 1
        assert component.contributors.count() == 1
        assert mock_mail.call_count == 1

    # Fixed: first parameter was misspelled `elf` instead of `self`, which made
    # pytest treat `app` as `self` and shift every other fixture by one.
    def test_request_followed_by_added_as_contrib(self, app, project, noncontrib, admin, url, create_payload):
        res = app.post_json_api(url, create_payload, auth=noncontrib.auth)
        assert res.status_code == 201
        assert project.requests.filter(creator=noncontrib, machine_state='pending').exists()

        project.add_contributor(noncontrib, save=True)
        assert project.is_contributor(noncontrib)
        assert not project.requests.filter(creator=noncontrib, machine_state='pending').exists()
        assert project.requests.filter(creator=noncontrib, machine_state='accepted').exists()

    def test_filter_by_machine_state(self, app, project, noncontrib, url, admin, node_request):
        initial_node_request = NodeRequestFactory(
            creator=noncontrib,
            target=project,
            request_type=workflows.RequestTypes.ACCESS.value,
            machine_state=workflows.DefaultStates.INITIAL.value
        )
        filtered_url = '{}?filter[machine_state]=pending'.format(url)
        res = app.get(filtered_url, auth=admin.auth)
        assert res.status_code == 200
        ids = [result['id'] for result in res.json['data']]
        assert initial_node_request._id not in ids
        assert node_request.machine_state == 'pending' and node_request._id in ids


@pytest.mark.django_db
class TestPreprintRequestListCreate(PreprintRequestTestMixin):
    """List/create behavior of the preprint withdrawal-request endpoint."""

    def url(self, preprint):
        return '/{}preprints/{}/requests/'.format(API_BASE, preprint._id)

    @pytest.fixture()
    def create_payload(self):
        return {
            'data': {
                'attributes': {
                    'comment': 'ASDFG',
                    'request_type': 'withdrawal'
                },
                'type': 'preprint-requests'
            }
        }

    def test_noncontrib_cannot_submit(self, app, noncontrib, create_payload, pre_mod_preprint, post_mod_preprint, none_mod_preprint):
        for preprint in [pre_mod_preprint, post_mod_preprint, none_mod_preprint]:
            res = app.post_json_api(self.url(preprint), create_payload, auth=noncontrib.auth, expect_errors=True)
            assert res.status_code == 403

    def test_unauth_cannot_submit(self, app, create_payload, pre_mod_preprint, post_mod_preprint, none_mod_preprint):
        for preprint in [pre_mod_preprint, post_mod_preprint, none_mod_preprint]:
            res = app.post_json_api(self.url(preprint), create_payload, expect_errors=True)
            assert res.status_code == 401

    def test_write_contributor_cannot_submit(self, app, write_contrib, create_payload, pre_mod_preprint, post_mod_preprint, none_mod_preprint):
        for preprint in [pre_mod_preprint, post_mod_preprint, none_mod_preprint]:
            res = app.post_json_api(self.url(preprint), create_payload, auth=write_contrib.auth, expect_errors=True)
            assert res.status_code == 403

    def test_admin_can_submit(self, app, admin, create_payload, pre_mod_preprint, post_mod_preprint, none_mod_preprint):
        for preprint in [pre_mod_preprint, post_mod_preprint, none_mod_preprint]:
            res = app.post_json_api(self.url(preprint), create_payload, auth=admin.auth)
            assert res.status_code == 201

    def test_admin_can_view_requests(self, app, admin, pre_request, post_request, none_request):
        for request in [pre_request, post_request, none_request]:
            res = app.get(self.url(request.target), auth=admin.auth)
            assert res.status_code == 200
            assert res.json['data'][0]['id'] == request._id

    def test_noncontrib_and_write_contrib_cannot_view_requests(self, app, noncontrib, write_contrib, pre_request, post_request, none_request):
        for request in [pre_request, post_request, none_request]:
            for user in [noncontrib, write_contrib]:
                res = app.get(self.url(request.target), auth=user.auth, expect_errors=True)
                assert res.status_code == 403

    def test_unauth_cannot_view_requests(self, app, noncontrib, write_contrib, pre_request, post_request, none_request):
        for request in [pre_request, post_request, none_request]:
            res = app.get(self.url(request.target), expect_errors=True)
            assert res.status_code == 401

    def test_requester_cannot_submit_again(self, app, admin, create_payload, pre_mod_preprint, pre_request):
        res = app.post_json_api(self.url(pre_mod_preprint), create_payload, auth=admin.auth, expect_errors=True)
        assert res.status_code == 409
        assert res.json['errors'][0]['detail'] == 'Users may not have more than one withdrawal request per preprint.'

    @pytest.mark.skip('TODO: IN-284 -- add emails')
    @mock.patch('website.reviews.listeners.mails.send_mail')
    def test_email_sent_to_moderators_on_submit(self, mock_mail, app, admin, create_payload, moderator, post_mod_preprint):
        res = app.post_json_api(self.url(post_mod_preprint), create_payload, auth=admin.auth)
        assert res.status_code == 201
        assert mock_mail.call_count == 1
baylee-d/osf.io
api_tests/requests/views/test_request_list_create.py
osf/utils/notifications.py
"""Support for SleepIQ sensors.""" from homeassistant.components import sleepiq ICON = "mdi:hotel" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the SleepIQ sensors.""" if discovery_info is None: return data = sleepiq.DATA data.update() dev = list() for bed_id, bed in data.beds.items(): for side in sleepiq.SIDES: if getattr(bed, side) is not None: dev.append(SleepNumberSensor(data, bed_id, side)) add_entities(dev) class SleepNumberSensor(sleepiq.SleepIQSensor): """Implementation of a SleepIQ sensor.""" def __init__(self, sleepiq_data, bed_id, side): """Initialize the sensor.""" sleepiq.SleepIQSensor.__init__(self, sleepiq_data, bed_id, side) self._state = None self.type = sleepiq.SLEEP_NUMBER self._name = sleepiq.SENSOR_TYPES[self.type] self.update() @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return ICON def update(self): """Get the latest data from SleepIQ and updates the states.""" sleepiq.SleepIQSensor.update(self) self._state = self.side.sleep_number
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/sleepiq/sensor.py
"""Real-time information about public transport departures in Norway.""" from datetime import datetime, timedelta import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_SHOW_ON_MAP, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) API_CLIENT_NAME = "homeassistant-homeassistant" ATTRIBUTION = "Data provided by entur.org under NLOD" CONF_STOP_IDS = "stop_ids" CONF_EXPAND_PLATFORMS = "expand_platforms" CONF_WHITELIST_LINES = "line_whitelist" CONF_OMIT_NON_BOARDING = "omit_non_boarding" CONF_NUMBER_OF_DEPARTURES = "number_of_departures" DEFAULT_NAME = "Entur" DEFAULT_ICON_KEY = "bus" ICONS = { "air": "mdi:airplane", "bus": "mdi:bus", "metro": "mdi:subway", "rail": "mdi:train", "tram": "mdi:tram", "water": "mdi:ferry", } SCAN_INTERVAL = timedelta(seconds=45) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STOP_IDS): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_EXPAND_PLATFORMS, default=True): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean, vol.Optional(CONF_WHITELIST_LINES, default=[]): cv.ensure_list, vol.Optional(CONF_OMIT_NON_BOARDING, default=True): cv.boolean, vol.Optional(CONF_NUMBER_OF_DEPARTURES, default=2): vol.All( cv.positive_int, vol.Range(min=2, max=10) ), } ) ATTR_STOP_ID = "stop_id" ATTR_ROUTE = "route" ATTR_ROUTE_ID = "route_id" ATTR_EXPECTED_AT = "due_at" ATTR_DELAY = "delay" ATTR_REALTIME = "real_time" ATTR_NEXT_UP_IN = "next_due_in" ATTR_NEXT_UP_ROUTE = "next_route" ATTR_NEXT_UP_ROUTE_ID = "next_route_id" ATTR_NEXT_UP_AT = "next_due_at" ATTR_NEXT_UP_DELAY = 
"next_delay" ATTR_NEXT_UP_REALTIME = "next_real_time" ATTR_TRANSPORT_MODE = "transport_mode" def due_in_minutes(timestamp: datetime) -> int: """Get the time in minutes from a timestamp.""" if timestamp is None: return None diff = timestamp - dt_util.now() return int(diff.total_seconds() / 60) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Entur public transport sensor.""" from enturclient import EnturPublicTransportData expand = config.get(CONF_EXPAND_PLATFORMS) line_whitelist = config.get(CONF_WHITELIST_LINES) name = config.get(CONF_NAME) show_on_map = config.get(CONF_SHOW_ON_MAP) stop_ids = config.get(CONF_STOP_IDS) omit_non_boarding = config.get(CONF_OMIT_NON_BOARDING) number_of_departures = config.get(CONF_NUMBER_OF_DEPARTURES) stops = [s for s in stop_ids if "StopPlace" in s] quays = [s for s in stop_ids if "Quay" in s] data = EnturPublicTransportData( API_CLIENT_NAME, stops=stops, quays=quays, line_whitelist=line_whitelist, omit_non_boarding=omit_non_boarding, number_of_departures=number_of_departures, web_session=async_get_clientsession(hass), ) if expand: await data.expand_all_quays() await data.update() proxy = EnturProxy(data) entities = [] for place in data.all_stop_places_quays(): try: given_name = "{} {}".format(name, data.get_stop_info(place).name) except KeyError: given_name = f"{name} {place}" entities.append( EnturPublicTransportSensor(proxy, given_name, place, show_on_map) ) async_add_entities(entities, True) class EnturProxy: """Proxy for the Entur client. Ensure throttle to not hit rate limiting on the API. 
""" def __init__(self, api): """Initialize the proxy.""" self._api = api @Throttle(timedelta(seconds=15)) async def async_update(self) -> None: """Update data in client.""" await self._api.update() def get_stop_info(self, stop_id: str) -> dict: """Get info about specific stop place.""" return self._api.get_stop_info(stop_id) class EnturPublicTransportSensor(Entity): """Implementation of a Entur public transport sensor.""" def __init__(self, api: EnturProxy, name: str, stop: str, show_on_map: bool): """Initialize the sensor.""" self.api = api self._stop = stop self._show_on_map = show_on_map self._name = name self._state = None self._icon = ICONS[DEFAULT_ICON_KEY] self._attributes = {} @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> str: """Return the state of the sensor.""" return self._state @property def device_state_attributes(self) -> dict: """Return the state attributes.""" self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION self._attributes[ATTR_STOP_ID] = self._stop return self._attributes @property def unit_of_measurement(self) -> str: """Return the unit this state is expressed in.""" return "min" @property def icon(self) -> str: """Icon to use in the frontend.""" return self._icon async def async_update(self) -> None: """Get the latest data and update the states.""" await self.api.async_update() self._attributes = {} data = self.api.get_stop_info(self._stop) if data is None: self._state = None return if self._show_on_map and data.latitude and data.longitude: self._attributes[CONF_LATITUDE] = data.latitude self._attributes[CONF_LONGITUDE] = data.longitude calls = data.estimated_calls if not calls: self._state = None return self._state = due_in_minutes(calls[0].expected_departure_time) self._icon = ICONS.get(calls[0].transport_mode, ICONS[DEFAULT_ICON_KEY]) self._attributes[ATTR_ROUTE] = calls[0].front_display self._attributes[ATTR_ROUTE_ID] = calls[0].line_id 
self._attributes[ATTR_EXPECTED_AT] = calls[0].expected_departure_time.strftime( "%H:%M" ) self._attributes[ATTR_REALTIME] = calls[0].is_realtime self._attributes[ATTR_DELAY] = calls[0].delay_in_min number_of_calls = len(calls) if number_of_calls < 2: return self._attributes[ATTR_NEXT_UP_ROUTE] = calls[1].front_display self._attributes[ATTR_NEXT_UP_ROUTE_ID] = calls[1].line_id self._attributes[ATTR_NEXT_UP_AT] = calls[1].expected_departure_time.strftime( "%H:%M" ) self._attributes[ATTR_NEXT_UP_IN] = "{} min".format( due_in_minutes(calls[1].expected_departure_time) ) self._attributes[ATTR_NEXT_UP_REALTIME] = calls[1].is_realtime self._attributes[ATTR_NEXT_UP_DELAY] = calls[1].delay_in_min if number_of_calls < 3: return for i, call in enumerate(calls[2:]): key_name = "departure_#" + str(i + 3) self._attributes[key_name] = "{}{} {}".format( "" if bool(call.is_realtime) else "ca. ", call.expected_departure_time.strftime("%H:%M"), call.front_display, )
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/entur_public_transport/sensor.py
"""Support for ISY994 covers.""" import logging from typing import Callable from homeassistant.components.cover import DOMAIN, CoverDevice from homeassistant.const import ( STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.helpers.typing import ConfigType from . import ISY994_NODES, ISY994_PROGRAMS, ISYDevice _LOGGER = logging.getLogger(__name__) VALUE_TO_STATE = { 0: STATE_CLOSED, 101: STATE_UNKNOWN, 102: "stopped", 103: STATE_CLOSING, 104: STATE_OPENING, } def setup_platform( hass, config: ConfigType, add_entities: Callable[[list], None], discovery_info=None ): """Set up the ISY994 cover platform.""" devices = [] for node in hass.data[ISY994_NODES][DOMAIN]: devices.append(ISYCoverDevice(node)) for name, status, actions in hass.data[ISY994_PROGRAMS][DOMAIN]: devices.append(ISYCoverProgram(name, status, actions)) add_entities(devices) class ISYCoverDevice(ISYDevice, CoverDevice): """Representation of an ISY994 cover device.""" @property def current_cover_position(self) -> int: """Return the current cover position.""" if self.is_unknown() or self.value is None: return None return sorted((0, self.value, 100))[1] @property def is_closed(self) -> bool: """Get whether the ISY994 cover device is closed.""" return self.state == STATE_CLOSED @property def state(self) -> str: """Get the state of the ISY994 cover device.""" if self.is_unknown(): return None return VALUE_TO_STATE.get(self.value, STATE_OPEN) def open_cover(self, **kwargs) -> None: """Send the open cover command to the ISY994 cover device.""" if not self._node.on(val=100): _LOGGER.error("Unable to open the cover") def close_cover(self, **kwargs) -> None: """Send the close cover command to the ISY994 cover device.""" if not self._node.off(): _LOGGER.error("Unable to close the cover") class ISYCoverProgram(ISYCoverDevice): """Representation of an ISY994 cover program.""" def __init__(self, name: str, node: object, actions: object) -> None: """Initialize the ISY994 cover 
program.""" super().__init__(node) self._name = name self._actions = actions @property def state(self) -> str: """Get the state of the ISY994 cover program.""" return STATE_CLOSED if bool(self.value) else STATE_OPEN def open_cover(self, **kwargs) -> None: """Send the open cover command to the ISY994 cover program.""" if not self._actions.runThen(): _LOGGER.error("Unable to open the cover") def close_cover(self, **kwargs) -> None: """Send the close cover command to the ISY994 cover program.""" if not self._actions.runElse(): _LOGGER.error("Unable to close the cover")
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/isy994/cover.py
"""Register a custom front end panel.""" import logging import os import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.loader import bind_hass _LOGGER = logging.getLogger(__name__) DOMAIN = "panel_custom" CONF_COMPONENT_NAME = "name" CONF_SIDEBAR_TITLE = "sidebar_title" CONF_SIDEBAR_ICON = "sidebar_icon" CONF_URL_PATH = "url_path" CONF_CONFIG = "config" CONF_WEBCOMPONENT_PATH = "webcomponent_path" CONF_JS_URL = "js_url" CONF_MODULE_URL = "module_url" CONF_EMBED_IFRAME = "embed_iframe" CONF_TRUST_EXTERNAL_SCRIPT = "trust_external_script" CONF_URL_EXCLUSIVE_GROUP = "url_exclusive_group" CONF_REQUIRE_ADMIN = "require_admin" MSG_URL_CONFLICT = "Pass in only one of webcomponent_path, module_url or js_url" DEFAULT_EMBED_IFRAME = False DEFAULT_TRUST_EXTERNAL = False DEFAULT_ICON = "mdi:bookmark" LEGACY_URL = "/api/panel_custom/{}" PANEL_DIR = "panels" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.ensure_list, [ vol.Schema( { vol.Required(CONF_COMPONENT_NAME): cv.string, vol.Optional(CONF_SIDEBAR_TITLE): cv.string, vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon, vol.Optional(CONF_URL_PATH): cv.string, vol.Optional(CONF_CONFIG): dict, vol.Exclusive( CONF_WEBCOMPONENT_PATH, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT, ): cv.string, vol.Exclusive( CONF_JS_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT ): cv.string, vol.Exclusive( CONF_MODULE_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT, ): cv.string, vol.Optional( CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME ): cv.boolean, vol.Optional( CONF_TRUST_EXTERNAL_SCRIPT, default=DEFAULT_TRUST_EXTERNAL ): cv.boolean, vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, } ) ], ) }, extra=vol.ALLOW_EXTRA, ) @bind_hass async def async_register_panel( hass, # The url to serve the panel frontend_url_path, # The webcomponent name that loads your panel webcomponent_name, # Title/icon for sidebar sidebar_title=None, sidebar_icon=None, # HTML source of 
your panel html_url=None, # JS source of your panel js_url=None, # JS module of your panel module_url=None, # If your panel should be run inside an iframe embed_iframe=DEFAULT_EMBED_IFRAME, # Should user be asked for confirmation when loading external source trust_external=DEFAULT_TRUST_EXTERNAL, # Configuration to be passed to the panel config=None, # If your panel should only be shown to admin users require_admin=False, ): """Register a new custom panel.""" if js_url is None and html_url is None and module_url is None: raise ValueError("Either js_url, module_url or html_url is required.") if (js_url and html_url) or (module_url and html_url): raise ValueError("Pass in only one of JS url, Module url or HTML url.") if config is not None and not isinstance(config, dict): raise ValueError("Config needs to be a dictionary.") custom_panel_config = { "name": webcomponent_name, "embed_iframe": embed_iframe, "trust_external": trust_external, } if js_url is not None: custom_panel_config["js_url"] = js_url if module_url is not None: custom_panel_config["module_url"] = module_url if html_url is not None: custom_panel_config["html_url"] = html_url if config is not None: # Make copy because we're mutating it config = dict(config) else: config = {} config["_panel_custom"] = custom_panel_config hass.components.frontend.async_register_built_in_panel( component_name="custom", sidebar_title=sidebar_title, sidebar_icon=sidebar_icon, frontend_url_path=frontend_url_path, config=config, require_admin=require_admin, ) async def async_setup(hass, config): """Initialize custom panel.""" if DOMAIN not in config: return True success = False for panel in config[DOMAIN]: name = panel[CONF_COMPONENT_NAME] kwargs = { "webcomponent_name": panel[CONF_COMPONENT_NAME], "frontend_url_path": panel.get(CONF_URL_PATH, name), "sidebar_title": panel.get(CONF_SIDEBAR_TITLE), "sidebar_icon": panel.get(CONF_SIDEBAR_ICON), "config": panel.get(CONF_CONFIG), "trust_external": panel[CONF_TRUST_EXTERNAL_SCRIPT], 
"embed_iframe": panel[CONF_EMBED_IFRAME], "require_admin": panel[CONF_REQUIRE_ADMIN], } panel_path = panel.get(CONF_WEBCOMPONENT_PATH) if panel_path is None: panel_path = hass.config.path(PANEL_DIR, f"{name}.html") if CONF_JS_URL in panel: kwargs["js_url"] = panel[CONF_JS_URL] elif CONF_MODULE_URL in panel: kwargs["module_url"] = panel[CONF_MODULE_URL] elif not await hass.async_add_job(os.path.isfile, panel_path): _LOGGER.error("Unable to find webcomponent for %s: %s", name, panel_path) continue else: url = LEGACY_URL.format(name) hass.http.register_static_path(url, panel_path) kwargs["html_url"] = url await async_register_panel(hass, **kwargs) success = True return success
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/panel_custom/__init__.py
"""Support for Danfoss Air HRV.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import CONF_HOST from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DANFOSS_AIR_PLATFORMS = ["sensor", "binary_sensor", "switch"] DOMAIN = "danfoss_air" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA ) def setup(hass, config): """Set up the Danfoss Air component.""" conf = config[DOMAIN] hass.data[DOMAIN] = DanfossAir(conf[CONF_HOST]) for platform in DANFOSS_AIR_PLATFORMS: discovery.load_platform(hass, platform, DOMAIN, {}, config) return True class DanfossAir: """Handle all communication with Danfoss Air CCM unit.""" def __init__(self, host): """Initialize the Danfoss Air CCM connection.""" self._data = {} from pydanfossair.danfossclient import DanfossClient self._client = DanfossClient(host) def get_value(self, item): """Get value for sensor.""" return self._data.get(item) def update_state(self, command, state_command): """Send update command to Danfoss Air CCM.""" self._data[state_command] = self._client.command(command) @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Use the data from Danfoss Air API.""" _LOGGER.debug("Fetching data from Danfoss Air CCM module") from pydanfossair.commands import ReadCommand self._data[ReadCommand.exhaustTemperature] = self._client.command( ReadCommand.exhaustTemperature ) self._data[ReadCommand.outdoorTemperature] = self._client.command( ReadCommand.outdoorTemperature ) self._data[ReadCommand.supplyTemperature] = self._client.command( ReadCommand.supplyTemperature ) self._data[ReadCommand.extractTemperature] = self._client.command( ReadCommand.extractTemperature ) self._data[ReadCommand.humidity] = round( self._client.command(ReadCommand.humidity), 2 ) 
self._data[ReadCommand.filterPercent] = round( self._client.command(ReadCommand.filterPercent), 2 ) self._data[ReadCommand.bypass] = self._client.command(ReadCommand.bypass) self._data[ReadCommand.fan_step] = self._client.command(ReadCommand.fan_step) self._data[ReadCommand.supply_fan_speed] = self._client.command( ReadCommand.supply_fan_speed ) self._data[ReadCommand.exhaust_fan_speed] = self._client.command( ReadCommand.exhaust_fan_speed ) self._data[ReadCommand.away_mode] = self._client.command(ReadCommand.away_mode) self._data[ReadCommand.boost] = self._client.command(ReadCommand.boost) self._data[ReadCommand.battery_percent] = self._client.command( ReadCommand.battery_percent ) self._data[ReadCommand.bypass] = self._client.command(ReadCommand.bypass) self._data[ReadCommand.automatic_bypass] = self._client.command( ReadCommand.automatic_bypass ) _LOGGER.debug("Done fetching data from Danfoss Air CCM module")
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/danfoss_air/__init__.py
"""Config flow to configure the OpenUV component.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from .const import DOMAIN @callback def configured_instances(hass): """Return a set of configured OpenUV instances.""" return set( "{0}, {1}".format( entry.data.get(CONF_LATITUDE, hass.config.latitude), entry.data.get(CONF_LONGITUDE, hass.config.longitude), ) for entry in hass.config_entries.async_entries(DOMAIN) ) @config_entries.HANDLERS.register(DOMAIN) class OpenUvFlowHandler(config_entries.ConfigFlow): """Handle an OpenUV config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Initialize the config flow.""" pass async def _show_form(self, errors=None): """Show the form to the user.""" data_schema = vol.Schema( { vol.Required(CONF_API_KEY): str, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_ELEVATION): vol.Coerce(float), } ) return self.async_show_form( step_id="user", data_schema=data_schema, errors=errors if errors else {} ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" from pyopenuv import Client from pyopenuv.errors import OpenUvError if not user_input: return await self._show_form() identifier = "{0}, {1}".format( user_input.get(CONF_LATITUDE, self.hass.config.latitude), user_input.get(CONF_LONGITUDE, self.hass.config.longitude), ) if identifier in configured_instances(self.hass): return await self._show_form({CONF_LATITUDE: "identifier_exists"}) websession = aiohttp_client.async_get_clientsession(self.hass) client = 
Client(user_input[CONF_API_KEY], 0, 0, websession) try: await client.uv_index() except OpenUvError: return await self._show_form({CONF_API_KEY: "invalid_api_key"}) return self.async_create_entry(title=identifier, data=user_input)
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/openuv/config_flow.py
"""Support for MQTT room presence detection.""" import logging import json from datetime import timedelta import voluptuous as vol from homeassistant.components import mqtt import homeassistant.helpers.config_validation as cv from homeassistant.components.mqtt import CONF_STATE_TOPIC from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, CONF_TIMEOUT, STATE_NOT_HOME, ATTR_ID from homeassistant.core import callback from homeassistant.helpers.entity import Entity from homeassistant.util import dt, slugify _LOGGER = logging.getLogger(__name__) ATTR_DEVICE_ID = "device_id" ATTR_DISTANCE = "distance" ATTR_ROOM = "room" CONF_DEVICE_ID = "device_id" CONF_AWAY_TIMEOUT = "away_timeout" DEFAULT_AWAY_TIMEOUT = 0 DEFAULT_NAME = "Room Sensor" DEFAULT_TIMEOUT = 5 DEFAULT_TOPIC = "room_presence" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICE_ID): cv.string, vol.Required(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema) MQTT_PAYLOAD = vol.Schema( vol.All( json.loads, vol.Schema( { vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_DISTANCE): vol.Coerce(float), }, extra=vol.ALLOW_EXTRA, ), ) ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up MQTT room Sensor.""" async_add_entities( [ MQTTRoomSensor( config.get(CONF_NAME), config.get(CONF_STATE_TOPIC), config.get(CONF_DEVICE_ID), config.get(CONF_TIMEOUT), config.get(CONF_AWAY_TIMEOUT), ) ] ) class MQTTRoomSensor(Entity): """Representation of a room sensor that is updated via MQTT.""" def __init__(self, name, state_topic, device_id, timeout, consider_home): """Initialize the sensor.""" self._state = STATE_NOT_HOME self._name = name self._state_topic = "{}{}".format(state_topic, "/+") self._device_id = slugify(device_id).upper() 
self._timeout = timeout self._consider_home = ( timedelta(seconds=consider_home) if consider_home else None ) self._distance = None self._updated = None async def async_added_to_hass(self): """Subscribe to MQTT events.""" @callback def update_state(device_id, room, distance): """Update the sensor state.""" self._state = room self._distance = distance self._updated = dt.utcnow() self.async_schedule_update_ha_state() @callback def message_received(msg): """Handle new MQTT messages.""" try: data = MQTT_PAYLOAD(msg.payload) except vol.MultipleInvalid as error: _LOGGER.debug("Skipping update because of malformatted data: %s", error) return device = _parse_update_data(msg.topic, data) if device.get(CONF_DEVICE_ID) == self._device_id: if self._distance is None or self._updated is None: update_state(**device) else: # update if: # device is in the same room OR # device is closer to another room OR # last update from other room was too long ago timediff = dt.utcnow() - self._updated if ( device.get(ATTR_ROOM) == self._state or device.get(ATTR_DISTANCE) < self._distance or timediff.seconds >= self._timeout ): update_state(**device) return await mqtt.async_subscribe( self.hass, self._state_topic, message_received, 1 ) @property def name(self): """Return the name of the sensor.""" return self._name @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_DISTANCE: self._distance} @property def state(self): """Return the current room of the entity.""" return self._state def update(self): """Update the state for absent devices.""" if ( self._updated and self._consider_home and dt.utcnow() - self._updated > self._consider_home ): self._state = STATE_NOT_HOME def _parse_update_data(topic, data): """Parse the room presence update.""" parts = topic.split("/") room = parts[-1] device_id = slugify(data.get(ATTR_ID)).upper() distance = data.get("distance") parsed_data = {ATTR_DEVICE_ID: device_id, ATTR_ROOM: room, ATTR_DISTANCE: distance} return 
parsed_data
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/mqtt_room/sensor.py
"""Support for functionality to have conversations with Home Assistant.""" import logging import re import voluptuous as vol from homeassistant import core from homeassistant.components import http from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import EVENT_COMPONENT_LOADED from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, intent from homeassistant.loader import bind_hass from homeassistant.setup import ATTR_COMPONENT from .util import create_matcher _LOGGER = logging.getLogger(__name__) ATTR_TEXT = "text" DOMAIN = "conversation" REGEX_TURN_COMMAND = re.compile(r"turn (?P<name>(?: |\w)+) (?P<command>\w+)") REGEX_TYPE = type(re.compile("")) UTTERANCES = { "cover": { INTENT_OPEN_COVER: ["Open [the] [a] [an] {name}[s]"], INTENT_CLOSE_COVER: ["Close [the] [a] [an] {name}[s]"], } } SERVICE_PROCESS = "process" SERVICE_PROCESS_SCHEMA = vol.Schema({vol.Required(ATTR_TEXT): cv.string}) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional("intents"): vol.Schema( {cv.string: vol.All(cv.ensure_list, [cv.string])} ) } ) }, extra=vol.ALLOW_EXTRA, ) @core.callback @bind_hass def async_register(hass, intent_type, utterances): """Register utterances and any custom intents. Registrations don't require conversations to be loaded. They will become active once the conversation component is loaded. 
""" intents = hass.data.get(DOMAIN) if intents is None: intents = hass.data[DOMAIN] = {} conf = intents.get(intent_type) if conf is None: conf = intents[intent_type] = [] for utterance in utterances: if isinstance(utterance, REGEX_TYPE): conf.append(utterance) else: conf.append(create_matcher(utterance)) async def async_setup(hass, config): """Register the process service.""" config = config.get(DOMAIN, {}) intents = hass.data.get(DOMAIN) if intents is None: intents = hass.data[DOMAIN] = {} for intent_type, utterances in config.get("intents", {}).items(): conf = intents.get(intent_type) if conf is None: conf = intents[intent_type] = [] conf.extend(create_matcher(utterance) for utterance in utterances) async def process(service): """Parse text into commands.""" text = service.data[ATTR_TEXT] _LOGGER.debug("Processing: <%s>", text) try: await _process(hass, text) except intent.IntentHandleError as err: _LOGGER.error("Error processing %s: %s", text, err) hass.services.async_register( DOMAIN, SERVICE_PROCESS, process, schema=SERVICE_PROCESS_SCHEMA ) hass.http.register_view(ConversationProcessView) # We strip trailing 's' from name because our state matcher will fail # if a letter is not there. By removing 's' we can match singular and # plural names. 
async_register( hass, intent.INTENT_TURN_ON, ["Turn [the] [a] {name}[s] on", "Turn on [the] [a] [an] {name}[s]"], ) async_register( hass, intent.INTENT_TURN_OFF, ["Turn [the] [a] [an] {name}[s] off", "Turn off [the] [a] [an] {name}[s]"], ) async_register( hass, intent.INTENT_TOGGLE, ["Toggle [the] [a] [an] {name}[s]", "[the] [a] [an] {name}[s] toggle"], ) @callback def register_utterances(component): """Register utterances for a component.""" if component not in UTTERANCES: return for intent_type, sentences in UTTERANCES[component].items(): async_register(hass, intent_type, sentences) @callback def component_loaded(event): """Handle a new component loaded.""" register_utterances(event.data[ATTR_COMPONENT]) hass.bus.async_listen(EVENT_COMPONENT_LOADED, component_loaded) # Check already loaded components. for component in hass.config.components: register_utterances(component) return True async def _process(hass, text): """Process a line of text.""" intents = hass.data.get(DOMAIN, {}) for intent_type, matchers in intents.items(): for matcher in matchers: match = matcher.match(text) if not match: continue response = await hass.helpers.intent.async_handle( DOMAIN, intent_type, {key: {"value": value} for key, value in match.groupdict().items()}, text, ) return response class ConversationProcessView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = "/api/conversation/process" name = "api:conversation:process" @RequestDataValidator(vol.Schema({vol.Required("text"): str})) async def post(self, request, data): """Send a request for processing.""" hass = request.app["hass"] try: intent_result = await _process(hass, data["text"]) except intent.IntentHandleError as err: intent_result = intent.IntentResponse() intent_result.async_set_speech(str(err)) if intent_result is None: intent_result = intent.IntentResponse() intent_result.async_set_speech("Sorry, I didn't understand that") return self.json(intent_result)
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/conversation/__init__.py
"""Support for Generic Modbus Thermostats.""" import logging import struct import voluptuous as vol from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateDevice from homeassistant.components.climate.const import ( SUPPORT_TARGET_TEMPERATURE, HVAC_MODE_HEAT, ) from homeassistant.const import ATTR_TEMPERATURE, CONF_NAME, CONF_SLAVE import homeassistant.helpers.config_validation as cv from . import CONF_HUB, DEFAULT_HUB, DOMAIN as MODBUS_DOMAIN _LOGGER = logging.getLogger(__name__) CONF_TARGET_TEMP = "target_temp_register" CONF_CURRENT_TEMP = "current_temp_register" CONF_DATA_TYPE = "data_type" CONF_COUNT = "data_count" CONF_PRECISION = "precision" DATA_TYPE_INT = "int" DATA_TYPE_UINT = "uint" DATA_TYPE_FLOAT = "float" SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE HVAC_MODES = [HVAC_MODE_HEAT] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_CURRENT_TEMP): cv.positive_int, vol.Required(CONF_NAME): cv.string, vol.Required(CONF_SLAVE): cv.positive_int, vol.Required(CONF_TARGET_TEMP): cv.positive_int, vol.Optional(CONF_COUNT, default=2): cv.positive_int, vol.Optional(CONF_DATA_TYPE, default=DATA_TYPE_FLOAT): vol.In( [DATA_TYPE_INT, DATA_TYPE_UINT, DATA_TYPE_FLOAT] ), vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string, vol.Optional(CONF_PRECISION, default=1): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Modbus Thermostat Platform.""" name = config.get(CONF_NAME) modbus_slave = config.get(CONF_SLAVE) target_temp_register = config.get(CONF_TARGET_TEMP) current_temp_register = config.get(CONF_CURRENT_TEMP) data_type = config.get(CONF_DATA_TYPE) count = config.get(CONF_COUNT) precision = config.get(CONF_PRECISION) hub_name = config.get(CONF_HUB) hub = hass.data[MODBUS_DOMAIN][hub_name] add_entities( [ ModbusThermostat( hub, name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision, ) ], True, ) class ModbusThermostat(ClimateDevice): """Representation of a 
Modbus Thermostat.""" def __init__( self, hub, name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision, ): """Initialize the unit.""" self._hub = hub self._name = name self._slave = modbus_slave self._target_temperature_register = target_temp_register self._current_temperature_register = current_temp_register self._target_temperature = None self._current_temperature = None self._data_type = data_type self._count = int(count) self._precision = precision self._structure = ">f" data_types = { DATA_TYPE_INT: {1: "h", 2: "i", 4: "q"}, DATA_TYPE_UINT: {1: "H", 2: "I", 4: "Q"}, DATA_TYPE_FLOAT: {1: "e", 2: "f", 4: "d"}, } self._structure = ">{}".format(data_types[self._data_type][self._count]) @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS def update(self): """Update Target & Current Temperature.""" self._target_temperature = self.read_register(self._target_temperature_register) self._current_temperature = self.read_register( self._current_temperature_register ) @property def hvac_mode(self): """Return the current HVAC mode.""" return HVAC_MODE_HEAT @property def hvac_modes(self): """Return the possible HVAC modes.""" return HVAC_MODES @property def name(self): """Return the name of the climate device.""" return self._name @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the target temperature.""" return self._target_temperature def set_temperature(self, **kwargs): """Set new target temperature.""" target_temperature = kwargs.get(ATTR_TEMPERATURE) if target_temperature is None: return byte_string = struct.pack(self._structure, target_temperature) register_value = struct.unpack(">h", byte_string[0:2])[0] try: self.write_register(self._target_temperature_register, register_value) except AttributeError as ex: _LOGGER.error(ex) def read_register(self, register): """Read 
holding register using the Modbus hub slave.""" try: result = self._hub.read_holding_registers( self._slave, register, self._count ) except AttributeError as ex: _LOGGER.error(ex) byte_string = b"".join( [x.to_bytes(2, byteorder="big") for x in result.registers] ) val = struct.unpack(self._structure, byte_string)[0] register_value = format(val, f".{self._precision}f") return register_value def write_register(self, register, value): """Write register using the Modbus hub slave.""" self._hub.write_registers(self._slave, register, [value, 0])
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/modbus/climate.py
"""Offer time listening automation rules.""" import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.const import CONF_AT, CONF_PLATFORM from homeassistant.helpers import config_validation as cv from homeassistant.helpers.event import async_track_time_change # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) TRIGGER_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): "time", vol.Required(CONF_AT): cv.time} ) async def async_attach_trigger(hass, config, action, automation_info): """Listen for state changes based on configuration.""" at_time = config.get(CONF_AT) hours, minutes, seconds = at_time.hour, at_time.minute, at_time.second @callback def time_automation_listener(now): """Listen for time changes and calls action.""" hass.async_run_job(action, {"trigger": {"platform": "time", "now": now}}) return async_track_time_change( hass, time_automation_listener, hour=hours, minute=minutes, second=seconds )
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/automation/time.py
"""Support for interacting with and controlling the cmus music player.""" import logging import voluptuous as vol from homeassistant.components.media_player import MediaPlayerDevice, PLATFORM_SCHEMA from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_OFF, STATE_PAUSED, STATE_PLAYING, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "cmus" DEFAULT_PORT = 3000 SUPPORT_CMUS = ( SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_TURN_OFF | SUPPORT_TURN_ON | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_SEEK | SUPPORT_PLAY ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Inclusive(CONF_HOST, "remote"): cv.string, vol.Inclusive(CONF_PASSWORD, "remote"): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discover_info=None): """Set up the CMUS platform.""" from pycmus import exceptions host = config.get(CONF_HOST) password = config.get(CONF_PASSWORD) port = config.get(CONF_PORT) name = config.get(CONF_NAME) try: cmus_remote = CmusDevice(host, password, port, name) except exceptions.InvalidPassword: _LOGGER.error("The provided password was rejected by cmus") return False add_entities([cmus_remote], True) class CmusDevice(MediaPlayerDevice): """Representation of a running cmus.""" # pylint: disable=no-member def __init__(self, server, password, port, name): """Initialize the CMUS device.""" from pycmus import remote if server: self.cmus = remote.PyCmus(server=server, password=password, port=port) auto_name = f"cmus-{server}" else: self.cmus = remote.PyCmus() auto_name = 
"cmus-local" self._name = name or auto_name self.status = {} def update(self): """Get the latest data and update the state.""" status = self.cmus.get_status_dict() if not status: _LOGGER.warning("Received no status from cmus") else: self.status = status @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the media state.""" if self.status.get("status") == "playing": return STATE_PLAYING if self.status.get("status") == "paused": return STATE_PAUSED return STATE_OFF @property def media_content_id(self): """Content ID of current playing media.""" return self.status.get("file") @property def content_type(self): """Content type of the current playing media.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.status.get("duration") @property def media_title(self): """Title of current playing media.""" return self.status["tag"].get("title") @property def media_artist(self): """Artist of current playing media, music track only.""" return self.status["tag"].get("artist") @property def media_track(self): """Track number of current playing media, music track only.""" return self.status["tag"].get("tracknumber") @property def media_album_name(self): """Album name of current playing media, music track only.""" return self.status["tag"].get("album") @property def media_album_artist(self): """Album artist of current playing media, music track only.""" return self.status["tag"].get("albumartist") @property def volume_level(self): """Return the volume level.""" left = self.status["set"].get("vol_left")[0] right = self.status["set"].get("vol_right")[0] if left != right: volume = float(left + right) / 2 else: volume = left return int(volume) / 100 @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_CMUS def turn_off(self): """Service to send the CMUS the command to stop playing.""" 
self.cmus.player_stop() def turn_on(self): """Service to send the CMUS the command to start playing.""" self.cmus.player_play() def set_volume_level(self, volume): """Set volume level, range 0..1.""" self.cmus.set_volume(int(volume * 100)) def volume_up(self): """Set the volume up.""" left = self.status["set"].get("vol_left") right = self.status["set"].get("vol_right") if left != right: current_volume = float(left + right) / 2 else: current_volume = left if current_volume <= 100: self.cmus.set_volume(int(current_volume) + 5) def volume_down(self): """Set the volume down.""" left = self.status["set"].get("vol_left") right = self.status["set"].get("vol_right") if left != right: current_volume = float(left + right) / 2 else: current_volume = left if current_volume <= 100: self.cmus.set_volume(int(current_volume) - 5) def play_media(self, media_type, media_id, **kwargs): """Send the play command.""" if media_type in [MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST]: self.cmus.player_play_file(media_id) else: _LOGGER.error( "Invalid media type %s. Only %s and %s are supported", media_type, MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST, ) def media_pause(self): """Send the pause command.""" self.cmus.player_pause() def media_next_track(self): """Send next track command.""" self.cmus.player_next() def media_previous_track(self): """Send next track command.""" self.cmus.player_prev() def media_seek(self, position): """Send seek command.""" self.cmus.seek(position) def media_play(self): """Send the play command.""" self.cmus.player_play() def media_stop(self): """Send the stop command.""" self.cmus.stop()
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/cmus/media_player.py
"""Support for Rheem EcoNet water heaters.""" import datetime import logging import voluptuous as vol from homeassistant.components.water_heater import ( DOMAIN, PLATFORM_SCHEMA, STATE_ECO, STATE_ELECTRIC, STATE_GAS, STATE_HEAT_PUMP, STATE_HIGH_DEMAND, STATE_OFF, STATE_PERFORMANCE, SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE, WaterHeaterDevice, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_PASSWORD, CONF_USERNAME, TEMP_FAHRENHEIT, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_VACATION_START = "next_vacation_start_date" ATTR_VACATION_END = "next_vacation_end_date" ATTR_ON_VACATION = "on_vacation" ATTR_TODAYS_ENERGY_USAGE = "todays_energy_usage" ATTR_IN_USE = "in_use" ATTR_START_DATE = "start_date" ATTR_END_DATE = "end_date" SUPPORT_FLAGS_HEATER = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE SERVICE_ADD_VACATION = "econet_add_vacation" SERVICE_DELETE_VACATION = "econet_delete_vacation" ADD_VACATION_SCHEMA = vol.Schema( { vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(ATTR_START_DATE): cv.positive_int, vol.Required(ATTR_END_DATE): cv.positive_int, } ) DELETE_VACATION_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) ECONET_DATA = "econet" ECONET_STATE_TO_HA = { "Energy Saver": STATE_ECO, "gas": STATE_GAS, "High Demand": STATE_HIGH_DEMAND, "Off": STATE_OFF, "Performance": STATE_PERFORMANCE, "Heat Pump Only": STATE_HEAT_PUMP, "Electric-Only": STATE_ELECTRIC, "Electric": STATE_ELECTRIC, "Heat Pump": STATE_HEAT_PUMP, } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the EcoNet water heaters.""" from pyeconet.api import PyEcoNet hass.data[ECONET_DATA] = {} hass.data[ECONET_DATA]["water_heaters"] = [] username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) econet = 
PyEcoNet(username, password) water_heaters = econet.get_water_heaters() hass_water_heaters = [ EcoNetWaterHeater(water_heater) for water_heater in water_heaters ] add_entities(hass_water_heaters) hass.data[ECONET_DATA]["water_heaters"].extend(hass_water_heaters) def service_handle(service): """Handle the service calls.""" entity_ids = service.data.get("entity_id") all_heaters = hass.data[ECONET_DATA]["water_heaters"] _heaters = [ x for x in all_heaters if not entity_ids or x.entity_id in entity_ids ] for _water_heater in _heaters: if service.service == SERVICE_ADD_VACATION: start = service.data.get(ATTR_START_DATE) end = service.data.get(ATTR_END_DATE) _water_heater.add_vacation(start, end) if service.service == SERVICE_DELETE_VACATION: for vacation in _water_heater.water_heater.vacations: vacation.delete() _water_heater.schedule_update_ha_state(True) hass.services.register( DOMAIN, SERVICE_ADD_VACATION, service_handle, schema=ADD_VACATION_SCHEMA ) hass.services.register( DOMAIN, SERVICE_DELETE_VACATION, service_handle, schema=DELETE_VACATION_SCHEMA ) class EcoNetWaterHeater(WaterHeaterDevice): """Representation of an EcoNet water heater.""" def __init__(self, water_heater): """Initialize the water heater.""" self.water_heater = water_heater self.supported_modes = self.water_heater.supported_modes self.econet_state_to_ha = {} self.ha_state_to_econet = {} for mode in ECONET_STATE_TO_HA: if mode in self.supported_modes: self.econet_state_to_ha[mode] = ECONET_STATE_TO_HA.get(mode) for key, value in self.econet_state_to_ha.items(): self.ha_state_to_econet[value] = key for mode in self.supported_modes: if mode not in ECONET_STATE_TO_HA: error = ( "Invalid operation mode mapping. " + mode + " doesn't map. Please report this." 
) _LOGGER.error(error) @property def name(self): """Return the device name.""" return self.water_heater.name @property def available(self): """Return if the the device is online or not.""" return self.water_heater.is_connected @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_FAHRENHEIT @property def device_state_attributes(self): """Return the optional device state attributes.""" data = {} vacations = self.water_heater.get_vacations() if vacations: data[ATTR_VACATION_START] = vacations[0].start_date data[ATTR_VACATION_END] = vacations[0].end_date data[ATTR_ON_VACATION] = self.water_heater.is_on_vacation todays_usage = self.water_heater.total_usage_for_today if todays_usage: data[ATTR_TODAYS_ENERGY_USAGE] = todays_usage data[ATTR_IN_USE] = self.water_heater.in_use return data @property def current_operation(self): """ Return current operation as one of the following. ["eco", "heat_pump", "high_demand", "electric_only"] """ current_op = self.econet_state_to_ha.get(self.water_heater.mode) return current_op @property def operation_list(self): """List of available operation modes.""" op_list = [] for mode in self.supported_modes: ha_mode = self.econet_state_to_ha.get(mode) if ha_mode is not None: op_list.append(ha_mode) return op_list @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS_HEATER def set_temperature(self, **kwargs): """Set new target temperature.""" target_temp = kwargs.get(ATTR_TEMPERATURE) if target_temp is not None: self.water_heater.set_target_set_point(target_temp) else: _LOGGER.error("A target temperature must be provided") def set_operation_mode(self, operation_mode): """Set operation mode.""" op_mode_to_set = self.ha_state_to_econet.get(operation_mode) if op_mode_to_set is not None: self.water_heater.set_mode(op_mode_to_set) else: _LOGGER.error("An operation mode must be provided") def add_vacation(self, start, end): """Add a vacation to this water 
heater.""" if not start: start = datetime.datetime.now() else: start = datetime.datetime.fromtimestamp(start) end = datetime.datetime.fromtimestamp(end) self.water_heater.set_vacation_mode(start, end) def update(self): """Get the latest date.""" self.water_heater.update_state() @property def target_temperature(self): """Return the temperature we try to reach.""" return self.water_heater.set_point @property def min_temp(self): """Return the minimum temperature.""" return self.water_heater.min_set_point @property def max_temp(self): """Return the maximum temperature.""" return self.water_heater.max_set_point
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/econet/water_heater.py
"""Support for AlarmDecoder devices.""" import logging from datetime import timedelta import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.const import EVENT_HOMEASSISTANT_STOP, CONF_HOST from homeassistant.helpers.discovery import load_platform from homeassistant.util import dt as dt_util from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA _LOGGER = logging.getLogger(__name__) DOMAIN = "alarmdecoder" DATA_AD = "alarmdecoder" CONF_DEVICE = "device" CONF_DEVICE_BAUD = "baudrate" CONF_DEVICE_PATH = "path" CONF_DEVICE_PORT = "port" CONF_DEVICE_TYPE = "type" CONF_PANEL_DISPLAY = "panel_display" CONF_ZONE_NAME = "name" CONF_ZONE_TYPE = "type" CONF_ZONE_LOOP = "loop" CONF_ZONE_RFID = "rfid" CONF_ZONES = "zones" CONF_RELAY_ADDR = "relayaddr" CONF_RELAY_CHAN = "relaychan" DEFAULT_DEVICE_TYPE = "socket" DEFAULT_DEVICE_HOST = "localhost" DEFAULT_DEVICE_PORT = 10000 DEFAULT_DEVICE_PATH = "/dev/ttyUSB0" DEFAULT_DEVICE_BAUD = 115200 DEFAULT_PANEL_DISPLAY = False DEFAULT_ZONE_TYPE = "opening" SIGNAL_PANEL_MESSAGE = "alarmdecoder.panel_message" SIGNAL_PANEL_ARM_AWAY = "alarmdecoder.panel_arm_away" SIGNAL_PANEL_ARM_HOME = "alarmdecoder.panel_arm_home" SIGNAL_PANEL_DISARM = "alarmdecoder.panel_disarm" SIGNAL_ZONE_FAULT = "alarmdecoder.zone_fault" SIGNAL_ZONE_RESTORE = "alarmdecoder.zone_restore" SIGNAL_RFX_MESSAGE = "alarmdecoder.rfx_message" SIGNAL_REL_MESSAGE = "alarmdecoder.rel_message" DEVICE_SOCKET_SCHEMA = vol.Schema( { vol.Required(CONF_DEVICE_TYPE): "socket", vol.Optional(CONF_HOST, default=DEFAULT_DEVICE_HOST): cv.string, vol.Optional(CONF_DEVICE_PORT, default=DEFAULT_DEVICE_PORT): cv.port, } ) DEVICE_SERIAL_SCHEMA = vol.Schema( { vol.Required(CONF_DEVICE_TYPE): "serial", vol.Optional(CONF_DEVICE_PATH, default=DEFAULT_DEVICE_PATH): cv.string, vol.Optional(CONF_DEVICE_BAUD, default=DEFAULT_DEVICE_BAUD): cv.string, } ) DEVICE_USB_SCHEMA = vol.Schema({vol.Required(CONF_DEVICE_TYPE): "usb"}) ZONE_SCHEMA = 
vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): vol.Any( DEVICE_CLASSES_SCHEMA ), vol.Optional(CONF_ZONE_RFID): cv.string, vol.Optional(CONF_ZONE_LOOP): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), vol.Inclusive( CONF_RELAY_ADDR, "relaylocation", "Relay address and channel must exist together", ): cv.byte, vol.Inclusive( CONF_RELAY_CHAN, "relaylocation", "Relay address and channel must exist together", ): cv.byte, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_DEVICE): vol.Any( DEVICE_SOCKET_SCHEMA, DEVICE_SERIAL_SCHEMA, DEVICE_USB_SCHEMA ), vol.Optional( CONF_PANEL_DISPLAY, default=DEFAULT_PANEL_DISPLAY ): cv.boolean, vol.Optional(CONF_ZONES): {vol.Coerce(int): ZONE_SCHEMA}, } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up for the AlarmDecoder devices.""" from alarmdecoder import AlarmDecoder from alarmdecoder.devices import SocketDevice, SerialDevice, USBDevice conf = config.get(DOMAIN) restart = False device = conf.get(CONF_DEVICE) display = conf.get(CONF_PANEL_DISPLAY) zones = conf.get(CONF_ZONES) device_type = device.get(CONF_DEVICE_TYPE) host = DEFAULT_DEVICE_HOST port = DEFAULT_DEVICE_PORT path = DEFAULT_DEVICE_PATH baud = DEFAULT_DEVICE_BAUD def stop_alarmdecoder(event): """Handle the shutdown of AlarmDecoder.""" _LOGGER.debug("Shutting down alarmdecoder") nonlocal restart restart = False controller.close() def open_connection(now=None): """Open a connection to AlarmDecoder.""" from alarmdecoder.util import NoDeviceError nonlocal restart try: controller.open(baud) except NoDeviceError: _LOGGER.debug("Failed to connect. 
Retrying in 5 seconds") hass.helpers.event.track_point_in_time( open_connection, dt_util.utcnow() + timedelta(seconds=5) ) return _LOGGER.debug("Established a connection with the alarmdecoder") restart = True def handle_closed_connection(event): """Restart after unexpected loss of connection.""" nonlocal restart if not restart: return restart = False _LOGGER.warning("AlarmDecoder unexpectedly lost connection.") hass.add_job(open_connection) def handle_message(sender, message): """Handle message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message) def handle_rfx_message(sender, message): """Handle RFX message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message) def zone_fault_callback(sender, zone): """Handle zone fault from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone) def zone_restore_callback(sender, zone): """Handle zone restore from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone) def handle_rel_message(sender, message): """Handle relay message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message) controller = False if device_type == "socket": host = device.get(CONF_HOST) port = device.get(CONF_DEVICE_PORT) controller = AlarmDecoder(SocketDevice(interface=(host, port))) elif device_type == "serial": path = device.get(CONF_DEVICE_PATH) baud = device.get(CONF_DEVICE_BAUD) controller = AlarmDecoder(SerialDevice(interface=path)) elif device_type == "usb": AlarmDecoder(USBDevice.find()) return False controller.on_message += handle_message controller.on_rfx_message += handle_rfx_message controller.on_zone_fault += zone_fault_callback controller.on_zone_restore += zone_restore_callback controller.on_close += handle_closed_connection controller.on_relay_changed += handle_rel_message hass.data[DATA_AD] = controller open_connection() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, 
stop_alarmdecoder) load_platform(hass, "alarm_control_panel", DOMAIN, conf, config) if zones: load_platform(hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones}, config) if display: load_platform(hass, "sensor", DOMAIN, conf, config) return True
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/alarmdecoder/__init__.py
"""Simplepush notification service.""" import logging import voluptuous as vol from homeassistant.const import CONF_PASSWORD import homeassistant.helpers.config_validation as cv from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) ATTR_ENCRYPTED = "encrypted" CONF_DEVICE_KEY = "device_key" CONF_EVENT = "event" CONF_SALT = "salt" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICE_KEY): cv.string, vol.Optional(CONF_EVENT): cv.string, vol.Inclusive(CONF_PASSWORD, ATTR_ENCRYPTED): cv.string, vol.Inclusive(CONF_SALT, ATTR_ENCRYPTED): cv.string, } ) def get_service(hass, config, discovery_info=None): """Get the Simplepush notification service.""" return SimplePushNotificationService(config) class SimplePushNotificationService(BaseNotificationService): """Implementation of the notification service for Simplepush.""" def __init__(self, config): """Initialize the Simplepush notification service.""" self._device_key = config.get(CONF_DEVICE_KEY) self._event = config.get(CONF_EVENT) self._password = config.get(CONF_PASSWORD) self._salt = config.get(CONF_SALT) def send_message(self, message="", **kwargs): """Send a message to a Simplepush user.""" from simplepush import send, send_encrypted title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) if self._password: send_encrypted( self._device_key, self._password, self._salt, title, message, event=self._event, ) else: send(self._device_key, title, message, event=self._event)
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/simplepush/notify.py
"""Support for the Opple light.""" import logging import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, Light, ) from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.util.color import ( color_temperature_kelvin_to_mired as kelvin_to_mired, color_temperature_mired_to_kelvin as mired_to_kelvin, ) _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "opple light" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Opple light platform.""" name = config[CONF_NAME] host = config[CONF_HOST] entity = OppleLight(name, host) add_entities([entity]) _LOGGER.debug("Init light %s %s", host, entity.unique_id) class OppleLight(Light): """Opple light device.""" def __init__(self, name, host): """Initialize an Opple light.""" from pyoppleio.OppleLightDevice import OppleLightDevice self._device = OppleLightDevice(host) self._name = name self._is_on = None self._brightness = None self._color_temp = None @property def available(self): """Return True if light is available.""" return self._device.is_online @property def unique_id(self): """Return unique ID for light.""" return self._device.mac @property def name(self): """Return the display name of this light.""" return self._name @property def is_on(self): """Return true if light is on.""" return self._is_on @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def color_temp(self): """Return the color temperature of this light.""" return kelvin_to_mired(self._color_temp) @property def min_mireds(self): """Return minimum supported color temperature.""" return 175 @property def max_mireds(self): """Return maximum supported color 
temperature.""" return 333 @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP def turn_on(self, **kwargs): """Instruct the light to turn on.""" _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs) if not self.is_on: self._device.power_on = True if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]: self._device.brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]: color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) self._device.color_temperature = color_temp def turn_off(self, **kwargs): """Instruct the light to turn off.""" self._device.power_on = False _LOGGER.debug("Turn off light %s", self._device.ip) def update(self): """Synchronize state with light.""" prev_available = self.available self._device.update() if ( prev_available == self.available and self._is_on == self._device.power_on and self._brightness == self._device.brightness and self._color_temp == self._device.color_temperature ): return if not self.available: _LOGGER.debug("Light %s is offline", self._device.ip) return self._is_on = self._device.power_on self._brightness = self._device.brightness self._color_temp = self._device.color_temperature if not self.is_on: _LOGGER.debug("Update light %s success: power off", self._device.ip) else: _LOGGER.debug( "Update light %s success: power on brightness %s " "color temperature %s", self._device.ip, self._brightness, self._color_temp, )
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/opple/light.py
"""Support for sending Wake-On-LAN magic packets.""" from functools import partial import logging import voluptuous as vol from homeassistant.const import CONF_MAC import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "wake_on_lan" CONF_BROADCAST_ADDRESS = "broadcast_address" SERVICE_SEND_MAGIC_PACKET = "send_magic_packet" WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA = vol.Schema( {vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string} ) async def async_setup(hass, config): """Set up the wake on LAN component.""" import wakeonlan async def send_magic_packet(call): """Send magic packet to wake up a device.""" mac_address = call.data.get(CONF_MAC) broadcast_address = call.data.get(CONF_BROADCAST_ADDRESS) _LOGGER.info( "Send magic packet to mac %s (broadcast: %s)", mac_address, broadcast_address, ) if broadcast_address is not None: await hass.async_add_job( partial( wakeonlan.send_magic_packet, mac_address, ip_address=broadcast_address, ) ) else: await hass.async_add_job(partial(wakeonlan.send_magic_packet, mac_address)) hass.services.async_register( DOMAIN, SERVICE_SEND_MAGIC_PACKET, send_magic_packet, schema=WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA, ) return True
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/wake_on_lan/__init__.py
"""Support for XS1 switches.""" import logging from xs1_api_client.api_constants import ActuatorType from homeassistant.helpers.entity import ToggleEntity from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, XS1DeviceEntity _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the XS1 switch platform.""" actuators = hass.data[COMPONENT_DOMAIN][ACTUATORS] switch_entities = [] for actuator in actuators: if (actuator.type() == ActuatorType.SWITCH) or ( actuator.type() == ActuatorType.DIMMER ): switch_entities.append(XS1SwitchEntity(actuator)) add_entities(switch_entities) class XS1SwitchEntity(XS1DeviceEntity, ToggleEntity): """Representation of a XS1 switch actuator.""" @property def name(self): """Return the name of the device if any.""" return self.device.name() @property def is_on(self): """Return true if switch is on.""" return self.device.value() == 100 def turn_on(self, **kwargs): """Turn the device on.""" self.device.turn_on() def turn_off(self, **kwargs): """Turn the device off.""" self.device.turn_off()
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/xs1/switch.py
"""Device tracker helpers.""" import asyncio from typing import Dict, Any, Callable, Optional from types import ModuleType import attr from homeassistant.core import callback from homeassistant.setup import async_prepare_setup_platform from homeassistant.helpers import config_per_platform from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType, HomeAssistantType from homeassistant.helpers.event import async_track_time_interval from homeassistant.util import dt as dt_util from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE from .const import ( DOMAIN, PLATFORM_TYPE_LEGACY, CONF_SCAN_INTERVAL, SCAN_INTERVAL, SOURCE_TYPE_ROUTER, LOGGER, ) @attr.s class DeviceTrackerPlatform: """Class to hold platform information.""" LEGACY_SETUP = ( "async_get_scanner", "get_scanner", "async_setup_scanner", "setup_scanner", ) name = attr.ib(type=str) platform = attr.ib(type=ModuleType) config = attr.ib(type=Dict) @property def type(self): """Return platform type.""" for methods, platform_type in ((self.LEGACY_SETUP, PLATFORM_TYPE_LEGACY),): for meth in methods: if hasattr(self.platform, meth): return platform_type return None async def async_setup_legacy(self, hass, tracker, discovery_info=None): """Set up a legacy platform.""" LOGGER.info("Setting up %s.%s", DOMAIN, self.type) try: scanner = None setup = None if hasattr(self.platform, "async_get_scanner"): scanner = await self.platform.async_get_scanner( hass, {DOMAIN: self.config} ) elif hasattr(self.platform, "get_scanner"): scanner = await hass.async_add_job( self.platform.get_scanner, hass, {DOMAIN: self.config} ) elif hasattr(self.platform, "async_setup_scanner"): setup = await self.platform.async_setup_scanner( hass, self.config, tracker.async_see, discovery_info ) elif hasattr(self.platform, "setup_scanner"): setup = await hass.async_add_job( self.platform.setup_scanner, hass, self.config, tracker.see, discovery_info, ) else: raise HomeAssistantError("Invalid legacy 
device_tracker platform.") if scanner: async_setup_scanner_platform( hass, self.config, scanner, tracker.async_see, self.type ) return if not setup: LOGGER.error("Error setting up platform %s", self.type) return except Exception: # pylint: disable=broad-except LOGGER.exception("Error setting up platform %s", self.type) async def async_extract_config(hass, config): """Extract device tracker config and split between legacy and modern.""" legacy = [] for platform in await asyncio.gather( *( async_create_platform_type(hass, config, p_type, p_config) for p_type, p_config in config_per_platform(config, DOMAIN) ) ): if platform is None: continue if platform.type == PLATFORM_TYPE_LEGACY: legacy.append(platform) else: raise ValueError( "Unable to determine type for {}: {}".format( platform.name, platform.type ) ) return legacy async def async_create_platform_type( hass, config, p_type, p_config ) -> Optional[DeviceTrackerPlatform]: """Determine type of platform.""" platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type) if platform is None: return None return DeviceTrackerPlatform(p_type, platform, p_config) @callback def async_setup_scanner_platform( hass: HomeAssistantType, config: ConfigType, scanner: Any, async_see_device: Callable, platform: str, ): """Set up the connect scanner-based platform to device tracker. This method must be run in the event loop. 
""" interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) update_lock = asyncio.Lock() scanner.hass = hass # Initial scan of each mac we also tell about host name for config seen: Any = set() async def async_device_tracker_scan(now: dt_util.dt.datetime): """Handle interval matches.""" if update_lock.locked(): LOGGER.warning( "Updating device list from %s took longer than the scheduled " "scan interval %s", platform, interval, ) return async with update_lock: found_devices = await scanner.async_scan_devices() for mac in found_devices: if mac in seen: host_name = None else: host_name = await scanner.async_get_device_name(mac) seen.add(mac) try: extra_attributes = await scanner.async_get_extra_attributes(mac) except NotImplementedError: extra_attributes = dict() kwargs = { "mac": mac, "host_name": host_name, "source_type": SOURCE_TYPE_ROUTER, "attributes": { "scanner": scanner.__class__.__name__, **extra_attributes, }, } zone_home = hass.states.get(hass.components.zone.ENTITY_ID_HOME) if zone_home: kwargs["gps"] = [ zone_home.attributes[ATTR_LATITUDE], zone_home.attributes[ATTR_LONGITUDE], ] kwargs["gps_accuracy"] = 0 hass.async_create_task(async_see_device(**kwargs)) async_track_time_interval(hass, async_device_tracker_scan, interval) hass.async_create_task(async_device_tracker_scan(None))
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/device_tracker/setup.py
"""Constants for the IGD component.""" import logging CONF_ENABLE_PORT_MAPPING = "port_mapping" CONF_ENABLE_SENSORS = "sensors" CONF_HASS = "hass" CONF_LOCAL_IP = "local_ip" CONF_PORTS = "ports" DOMAIN = "upnp" LOGGER = logging.getLogger(__package__) SIGNAL_REMOVE_SENSOR = "upnp_remove_sensor"
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/upnp/const.py
"""Support for the ZHA platform.""" import logging import time from homeassistant.components.device_tracker import DOMAIN, SOURCE_TYPE_ROUTER from homeassistant.components.device_tracker.config_entry import ScannerEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core.const import ( CHANNEL_POWER_CONFIGURATION, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ATTR_UPDATED, ZHA_DISCOVERY_NEW, ) from .entity import ZhaEntity from .sensor import battery_percentage_remaining_formatter _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation device tracker from config entry.""" async def async_discover(discovery_info): await _async_setup_entities( hass, config_entry, async_add_entities, [discovery_info] ) unsub = async_dispatcher_connect( hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) device_trackers = hass.data.get(DATA_ZHA, {}).get(DOMAIN) if device_trackers is not None: await _async_setup_entities( hass, config_entry, async_add_entities, device_trackers.values() ) del hass.data[DATA_ZHA][DOMAIN] async def _async_setup_entities( hass, config_entry, async_add_entities, discovery_infos ): """Set up the ZHA device trackers.""" entities = [] for discovery_info in discovery_infos: entities.append(ZHADeviceScannerEntity(**discovery_info)) async_add_entities(entities, update_before_add=True) class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity): """Represent a tracked device.""" def __init__(self, **kwargs): """Initialize the ZHA device tracker.""" super().__init__(**kwargs) self._battery_channel = self.cluster_channels.get(CHANNEL_POWER_CONFIGURATION) self._connected = False self._keepalive_interval = 60 self._should_poll = True self._battery_level = None async def async_added_to_hass(self): """Run when about to be added to hass.""" await 
super().async_added_to_hass() if self._battery_channel: await self.async_accept_signal( self._battery_channel, SIGNAL_ATTR_UPDATED, self.async_battery_percentage_remaining_updated, ) async def async_update(self): """Handle polling.""" if self.zha_device.last_seen is None: self._connected = False else: difference = time.time() - self.zha_device.last_seen if difference > self._keepalive_interval: self._connected = False else: self._connected = True @property def is_connected(self): """Return true if the device is connected to the network.""" return self._connected @property def source_type(self): """Return the source type, eg gps or router, of the device.""" return SOURCE_TYPE_ROUTER @callback def async_battery_percentage_remaining_updated(self, value): """Handle tracking.""" self.debug("battery_percentage_remaining updated: %s", value) self._connected = True self._battery_level = battery_percentage_remaining_formatter(value) self.async_schedule_update_ha_state() @property def battery_level(self): """Return the battery level of the device. Percentage from 0-100. """ return self._battery_level
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/zha/device_tracker.py
"""Support for Rain Bird Irrigation system LNK WiFi Module.""" import logging from pyrainbird import AvailableStations, RainbirdController import voluptuous as vol from homeassistant.components.switch import SwitchDevice from homeassistant.const import ATTR_ENTITY_ID, CONF_FRIENDLY_NAME, CONF_TRIGGER_TIME from homeassistant.helpers import config_validation as cv from . import CONF_ZONES, DATA_RAINBIRD, DOMAIN, RAINBIRD_CONTROLLER _LOGGER = logging.getLogger(__name__) ATTR_DURATION = "duration" SERVICE_START_IRRIGATION = "start_irrigation" SERVICE_SCHEMA_IRRIGATION = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_DURATION): vol.All(vol.Coerce(float), vol.Range(min=0)), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Rain Bird switches over a Rain Bird controller.""" if discovery_info is None: return controller: RainbirdController = hass.data[DATA_RAINBIRD][ discovery_info[RAINBIRD_CONTROLLER] ] available_stations: AvailableStations = controller.get_available_stations() if not (available_stations and available_stations.stations): return devices = [] for zone in range(1, available_stations.stations.count + 1): if available_stations.stations.active(zone): zone_config = discovery_info.get(CONF_ZONES, {}).get(zone, {}) time = zone_config.get(CONF_TRIGGER_TIME, discovery_info[CONF_TRIGGER_TIME]) name = zone_config.get(CONF_FRIENDLY_NAME) devices.append( RainBirdSwitch( controller, zone, time, name if name else "Sprinkler {}".format(zone), ) ) add_entities(devices, True) def start_irrigation(service): entity_id = service.data[ATTR_ENTITY_ID] duration = service.data[ATTR_DURATION] for device in devices: if device.entity_id == entity_id: device.turn_on(duration=duration) hass.services.register( DOMAIN, SERVICE_START_IRRIGATION, start_irrigation, schema=SERVICE_SCHEMA_IRRIGATION, ) class RainBirdSwitch(SwitchDevice): """Representation of a Rain Bird switch.""" def __init__(self, controller: RainbirdController, 
zone, time, name): """Initialize a Rain Bird Switch Device.""" self._rainbird = controller self._zone = zone self._name = name self._state = None self._duration = time self._attributes = {ATTR_DURATION: self._duration, "zone": self._zone} @property def device_state_attributes(self): """Return state attributes.""" return self._attributes @property def name(self): """Get the name of the switch.""" return self._name def turn_on(self, **kwargs): """Turn the switch on.""" if self._rainbird.irrigate_zone( int(self._zone), int(kwargs[ATTR_DURATION] if ATTR_DURATION in kwargs else self._duration), ): self._state = True def turn_off(self, **kwargs): """Turn the switch off.""" if self._rainbird.stop_irrigation(): self._state = False def update(self): """Update switch status.""" self._state = self._rainbird.get_zone_state(self._zone) @property def is_on(self): """Return true if switch is on.""" return self._state
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/components/rainbird/switch.py
"""Manage config entries in Home Assistant.""" import asyncio import logging import functools import uuid from typing import Any, Callable, List, Optional, Set import weakref import attr from homeassistant import data_entry_flow, loader from homeassistant.core import callback, HomeAssistant from homeassistant.exceptions import HomeAssistantError, ConfigEntryNotReady from homeassistant.setup import async_setup_component, async_process_deps_reqs from homeassistant.util.decorator import Registry from homeassistant.helpers import entity_registry # mypy: allow-untyped-defs _LOGGER = logging.getLogger(__name__) _UNDEF = object() SOURCE_USER = "user" SOURCE_DISCOVERY = "discovery" SOURCE_IMPORT = "import" HANDLERS = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 # Deprecated since 0.73 PATH_CONFIG = ".config_entries.json" SAVE_DELAY = 1 # The config entry has been set up successfully ENTRY_STATE_LOADED = "loaded" # There was an error while trying to set up this config entry ENTRY_STATE_SETUP_ERROR = "setup_error" # There was an error while trying to migrate the config entry to a new version ENTRY_STATE_MIGRATION_ERROR = "migration_error" # The config entry was not ready to be set up yet, but might be later ENTRY_STATE_SETUP_RETRY = "setup_retry" # The config entry has not been loaded ENTRY_STATE_NOT_LOADED = "not_loaded" # An error occurred when trying to unload the entry ENTRY_STATE_FAILED_UNLOAD = "failed_unload" UNRECOVERABLE_STATES = (ENTRY_STATE_MIGRATION_ERROR, ENTRY_STATE_FAILED_UNLOAD) DISCOVERY_NOTIFICATION_ID = "config_entry_discovery" DISCOVERY_SOURCES = ("ssdp", "zeroconf", SOURCE_DISCOVERY, SOURCE_IMPORT) EVENT_FLOW_DISCOVERED = "config_entry_discovered" CONN_CLASS_CLOUD_PUSH = "cloud_push" CONN_CLASS_CLOUD_POLL = "cloud_poll" CONN_CLASS_LOCAL_PUSH = "local_push" CONN_CLASS_LOCAL_POLL = "local_poll" CONN_CLASS_ASSUMED = "assumed" CONN_CLASS_UNKNOWN = "unknown" class ConfigError(HomeAssistantError): """Error while configuring an account.""" 
class UnknownEntry(ConfigError):
    """Unknown entry specified."""


class OperationNotAllowed(ConfigError):
    """Raised when a config entry operation is not allowed."""


class ConfigEntry:
    """Hold a configuration entry."""

    # NOTE(review): "_setup_lock" is declared as a slot but never assigned in
    # this class — confirm it is not used elsewhere before removing.
    __slots__ = (
        "entry_id",
        "version",
        "domain",
        "title",
        "data",
        "options",
        "system_options",
        "source",
        "connection_class",
        "state",
        "_setup_lock",
        "update_listeners",
        "_async_cancel_retry_setup",
    )

    def __init__(
        self,
        version: int,
        domain: str,
        title: str,
        data: dict,
        source: str,
        connection_class: str,
        system_options: dict,
        options: Optional[dict] = None,
        entry_id: Optional[str] = None,
        state: str = ENTRY_STATE_NOT_LOADED,
    ) -> None:
        """Initialize a config entry."""
        # Unique id of the config entry
        self.entry_id = entry_id or uuid.uuid4().hex

        # Version of the configuration.
        self.version = version

        # Domain the configuration belongs to
        self.domain = domain

        # Title of the configuration
        self.title = title

        # Config data
        self.data = data

        # Entry options
        self.options = options or {}

        # Entry system options
        self.system_options = SystemOptions(**system_options)

        # Source of the configuration (user, discovery, cloud)
        self.source = source

        # Connection class
        self.connection_class = connection_class

        # State of the entry (LOADED, NOT_LOADED)
        self.state = state

        # Listeners to call on update (stored as weakrefs; see
        # add_update_listener)
        self.update_listeners: List = []

        # Function to cancel a scheduled retry
        self._async_cancel_retry_setup: Optional[Callable[[], Any]] = None

    async def async_setup(
        self,
        hass: HomeAssistant,
        *,
        integration: Optional[loader.Integration] = None,
        tries: int = 0,
    ) -> None:
        """Set up an entry.

        When `integration` differs from `self.domain` this is a forwarded
        setup (see ConfigEntries.async_forward_entry_setup) and the entry
        state is deliberately left untouched.
        """
        if integration is None:
            integration = await loader.async_get_integration(hass, self.domain)

        try:
            component = integration.get_component()
        except ImportError as err:
            _LOGGER.error(
                "Error importing integration %s to set up %s config entry: %s",
                integration.domain,
                self.domain,
                err,
            )
            # Only mark the entry failed if this is not a forwarded setup.
            if self.domain == integration.domain:
                self.state = ENTRY_STATE_SETUP_ERROR
            return

        if self.domain == integration.domain:
            # The config_flow platform must be importable so migration can
            # look up the handler's VERSION.
            try:
                integration.get_platform("config_flow")
            except ImportError as err:
                _LOGGER.error(
                    "Error importing platform config_flow from integration %s to set up %s config entry: %s",
                    integration.domain,
                    self.domain,
                    err,
                )
                self.state = ENTRY_STATE_SETUP_ERROR
                return

            # Perform migration
            if not await self.async_migrate(hass):
                self.state = ENTRY_STATE_MIGRATION_ERROR
                return

        try:
            result = await component.async_setup_entry(  # type: ignore
                hass, self
            )

            if not isinstance(result, bool):
                _LOGGER.error(
                    "%s.async_setup_entry did not return boolean", integration.domain
                )
                result = False
        except ConfigEntryNotReady:
            self.state = ENTRY_STATE_SETUP_RETRY
            # Exponential backoff capped at 2**4 * 5 = 80 seconds.
            wait_time = 2 ** min(tries, 4) * 5
            tries += 1
            _LOGGER.warning(
                "Config entry for %s not ready yet. Retrying in %d seconds.",
                self.domain,
                wait_time,
            )

            async def setup_again(now):
                """Run setup again."""
                self._async_cancel_retry_setup = None
                await self.async_setup(hass, integration=integration, tries=tries)

            self._async_cancel_retry_setup = hass.helpers.event.async_call_later(
                wait_time, setup_again
            )
            return
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error setting up entry %s for %s", self.title, integration.domain
            )
            result = False

        # Only store setup result as state if it was not forwarded.
        if self.domain != integration.domain:
            return

        if result:
            self.state = ENTRY_STATE_LOADED
        else:
            self.state = ENTRY_STATE_SETUP_ERROR

    async def async_unload(
        self, hass: HomeAssistant, *, integration: Optional[loader.Integration] = None
    ) -> bool:
        """Unload an entry.

        Returns if unload is possible and was successful.
        """
        if integration is None:
            integration = await loader.async_get_integration(hass, self.domain)

        component = integration.get_component()

        if integration.domain == self.domain:
            if self.state in UNRECOVERABLE_STATES:
                return False

            if self.state != ENTRY_STATE_LOADED:
                # Entry never finished loading; cancel any pending retry and
                # consider the unload successful.
                if self._async_cancel_retry_setup is not None:
                    self._async_cancel_retry_setup()
                    self._async_cancel_retry_setup = None

                self.state = ENTRY_STATE_NOT_LOADED
                return True

        supports_unload = hasattr(component, "async_unload_entry")

        if not supports_unload:
            if integration.domain == self.domain:
                self.state = ENTRY_STATE_FAILED_UNLOAD
            return False

        try:
            result = await component.async_unload_entry(  # type: ignore
                hass, self
            )

            assert isinstance(result, bool)

            # Only adjust state if we unloaded the component
            if result and integration.domain == self.domain:
                self.state = ENTRY_STATE_NOT_LOADED

            return result
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error unloading entry %s for %s", self.title, integration.domain
            )
            if integration.domain == self.domain:
                self.state = ENTRY_STATE_FAILED_UNLOAD
            return False

    async def async_remove(self, hass: HomeAssistant) -> None:
        """Invoke remove callback on component.

        Best effort: the optional async_remove_entry hook is called and any
        failure is logged rather than propagated.
        """
        integration = await loader.async_get_integration(hass, self.domain)
        component = integration.get_component()
        if not hasattr(component, "async_remove_entry"):
            return
        try:
            await component.async_remove_entry(  # type: ignore
                hass, self
            )
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error calling entry remove callback %s for %s",
                self.title,
                integration.domain,
            )

    async def async_migrate(self, hass: HomeAssistant) -> bool:
        """Migrate an entry.

        Returns True if config entry is up-to-date or has been migrated.
        """
        handler = HANDLERS.get(self.domain)
        if handler is None:
            _LOGGER.error(
                "Flow handler not found for entry %s for %s", self.title, self.domain
            )
            return False

        # Handler may be a partial
        while isinstance(handler, functools.partial):
            handler = handler.func

        if self.version == handler.VERSION:
            return True

        integration = await loader.async_get_integration(hass, self.domain)
        component = integration.get_component()
        supports_migrate = hasattr(component, "async_migrate_entry")
        if not supports_migrate:
            _LOGGER.error(
                "Migration handler not found for entry %s for %s",
                self.title,
                self.domain,
            )
            return False

        try:
            result = await component.async_migrate_entry(  # type: ignore
                hass, self
            )
            if not isinstance(result, bool):
                _LOGGER.error(
                    "%s.async_migrate_entry did not return boolean", self.domain
                )
                return False
            if result:
                # Migration mutated self.data/self.version; persist it.
                # pylint: disable=protected-access
                hass.config_entries._async_schedule_save()  # type: ignore
            return result
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error migrating entry %s for %s", self.title, self.domain
            )
            return False

    def add_update_listener(self, listener: Callable) -> Callable:
        """Listen for when entry is updated.

        Listener: Callback function(hass, entry)

        Returns function to unlisten.
        """
        # Stored as a weakref so a forgotten listener does not keep its owner
        # alive; callers must keep a strong reference themselves.
        weak_listener = weakref.ref(listener)
        self.update_listeners.append(weak_listener)

        return lambda: self.update_listeners.remove(weak_listener)

    def as_dict(self):
        """Return dictionary version of this entry."""
        return {
            "entry_id": self.entry_id,
            "version": self.version,
            "domain": self.domain,
            "title": self.title,
            "data": self.data,
            "options": self.options,
            "system_options": self.system_options.as_dict(),
            "source": self.source,
            "connection_class": self.connection_class,
        }


class ConfigEntries:
    """Manage the configuration entries.

    An instance of this object is available via `hass.config_entries`.
""" def __init__(self, hass: HomeAssistant, hass_config: dict) -> None: """Initialize the entry manager.""" self.hass = hass self.flow = data_entry_flow.FlowManager( hass, self._async_create_flow, self._async_finish_flow ) self.options = OptionsFlowManager(hass) self._hass_config = hass_config self._entries: List[ConfigEntry] = [] self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) EntityRegistryDisabledHandler(hass).async_setup() @callback def async_domains(self) -> List[str]: """Return domains for which we have entries.""" seen: Set[str] = set() result = [] for entry in self._entries: if entry.domain not in seen: seen.add(entry.domain) result.append(entry.domain) return result @callback def async_get_entry(self, entry_id: str) -> Optional[ConfigEntry]: """Return entry with matching entry_id.""" for entry in self._entries: if entry_id == entry.entry_id: return entry return None @callback def async_entries(self, domain: Optional[str] = None) -> List[ConfigEntry]: """Return all entries or entries for a specific domain.""" if domain is None: return list(self._entries) return [entry for entry in self._entries if entry.domain == domain] async def async_remove(self, entry_id): """Remove an entry.""" entry = self.async_get_entry(entry_id) if entry is None: raise UnknownEntry if entry.state in UNRECOVERABLE_STATES: unload_success = entry.state != ENTRY_STATE_FAILED_UNLOAD else: unload_success = await self.async_unload(entry_id) await entry.async_remove(self.hass) self._entries.remove(entry) self._async_schedule_save() dev_reg, ent_reg = await asyncio.gather( self.hass.helpers.device_registry.async_get_registry(), self.hass.helpers.entity_registry.async_get_registry(), ) dev_reg.async_clear_config_entry(entry_id) ent_reg.async_clear_config_entry(entry_id) return {"require_restart": not unload_success} async def async_initialize(self) -> None: """Initialize config entry config.""" # Migrating for config entries stored before 0.73 config = await 
self.hass.helpers.storage.async_migrator( self.hass.config.path(PATH_CONFIG), self._store, old_conf_migrate_func=_old_conf_migrator, ) if config is None: self._entries = [] return self._entries = [ ConfigEntry( version=entry["version"], domain=entry["domain"], entry_id=entry["entry_id"], data=entry["data"], source=entry["source"], title=entry["title"], # New in 0.79 connection_class=entry.get("connection_class", CONN_CLASS_UNKNOWN), # New in 0.89 options=entry.get("options"), # New in 0.98 system_options=entry.get("system_options", {}), ) for entry in config["entries"] ] async def async_setup(self, entry_id: str) -> bool: """Set up a config entry. Return True if entry has been successfully loaded. """ entry = self.async_get_entry(entry_id) if entry is None: raise UnknownEntry if entry.state != ENTRY_STATE_NOT_LOADED: raise OperationNotAllowed # Setup Component if not set up yet if entry.domain in self.hass.config.components: await entry.async_setup(self.hass) else: # Setting up the component will set up all its config entries result = await async_setup_component( self.hass, entry.domain, self._hass_config ) if not result: return result return entry.state == ENTRY_STATE_LOADED async def async_unload(self, entry_id: str) -> bool: """Unload a config entry.""" entry = self.async_get_entry(entry_id) if entry is None: raise UnknownEntry if entry.state in UNRECOVERABLE_STATES: raise OperationNotAllowed return await entry.async_unload(self.hass) async def async_reload(self, entry_id: str) -> bool: """Reload an entry. If an entry was not loaded, will just load. 
""" unload_result = await self.async_unload(entry_id) if not unload_result: return unload_result return await self.async_setup(entry_id) @callback def async_update_entry( self, entry, *, data=_UNDEF, options=_UNDEF, system_options=_UNDEF ): """Update a config entry.""" if data is not _UNDEF: entry.data = data if options is not _UNDEF: entry.options = options if system_options is not _UNDEF: entry.system_options.update(**system_options) for listener_ref in entry.update_listeners: listener = listener_ref() self.hass.async_create_task(listener(self.hass, entry)) self._async_schedule_save() async def async_forward_entry_setup(self, entry, domain): """Forward the setup of an entry to a different component. By default an entry is setup with the component it belongs to. If that component also has related platforms, the component will have to forward the entry to be setup by that component. You don't want to await this coroutine if it is called as part of the setup of a component, because it can cause a deadlock. """ # Setup Component if not set up yet if domain not in self.hass.config.components: result = await async_setup_component(self.hass, domain, self._hass_config) if not result: return False integration = await loader.async_get_integration(self.hass, domain) await entry.async_setup(self.hass, integration=integration) async def async_forward_entry_unload(self, entry, domain): """Forward the unloading of an entry to a different component.""" # It was never loaded. 
if domain not in self.hass.config.components: return True integration = await loader.async_get_integration(self.hass, domain) return await entry.async_unload(self.hass, integration=integration) async def _async_finish_flow(self, flow, result): """Finish a config flow and add an entry.""" # Remove notification if no other discovery config entries in progress if not any( ent["context"]["source"] in DISCOVERY_SOURCES for ent in self.hass.config_entries.flow.async_progress() if ent["flow_id"] != flow.flow_id ): self.hass.components.persistent_notification.async_dismiss( DISCOVERY_NOTIFICATION_ID ) if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY: return result entry = ConfigEntry( version=result["version"], domain=result["handler"], title=result["title"], data=result["data"], options={}, system_options={}, source=flow.context["source"], connection_class=flow.CONNECTION_CLASS, ) self._entries.append(entry) self._async_schedule_save() await self.async_setup(entry.entry_id) result["result"] = entry return result async def _async_create_flow(self, handler_key, *, context, data): """Create a flow for specified handler. Handler key is the domain of the component that we want to set up. """ try: integration = await loader.async_get_integration(self.hass, handler_key) except loader.IntegrationNotFound: _LOGGER.error("Cannot find integration %s", handler_key) raise data_entry_flow.UnknownHandler # Make sure requirements and dependencies of component are resolved await async_process_deps_reqs(self.hass, self._hass_config, integration) try: integration.get_platform("config_flow") except ImportError as err: _LOGGER.error( "Error occurred loading config flow for integration %s: %s", handler_key, err, ) raise data_entry_flow.UnknownHandler handler = HANDLERS.get(handler_key) if handler is None: raise data_entry_flow.UnknownHandler source = context["source"] # Create notification. 
if source in DISCOVERY_SOURCES: self.hass.bus.async_fire(EVENT_FLOW_DISCOVERED) self.hass.components.persistent_notification.async_create( title="New devices discovered", message=( "We have discovered new devices on your network. " "[Check it out](/config/integrations)" ), notification_id=DISCOVERY_NOTIFICATION_ID, ) flow = handler() flow.init_step = source return flow def _async_schedule_save(self) -> None: """Save the entity registry to a file.""" self._store.async_delay_save(self._data_to_save, SAVE_DELAY) @callback def _data_to_save(self): """Return data to save.""" return {"entries": [entry.as_dict() for entry in self._entries]} async def _old_conf_migrator(old_config): """Migrate the pre-0.73 config format to the latest version.""" return {"entries": old_config} class ConfigFlow(data_entry_flow.FlowHandler): """Base class for config flows with some helpers.""" def __init_subclass__(cls, domain=None, **kwargs): """Initialize a subclass, register if possible.""" super().__init_subclass__(**kwargs) # type: ignore if domain is not None: HANDLERS.register(domain)(cls) CONNECTION_CLASS = CONN_CLASS_UNKNOWN @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" raise data_entry_flow.UnknownHandler @callback def _async_current_entries(self): """Return current entries.""" return self.hass.config_entries.async_entries(self.handler) @callback def _async_in_progress(self): """Return other in progress flows for current domain.""" return [ flw for flw in self.hass.config_entries.flow.async_progress() if flw["handler"] == self.handler and flw["flow_id"] != self.flow_id ] class OptionsFlowManager: """Flow to set options for a configuration entry.""" def __init__(self, hass: HomeAssistant) -> None: """Initialize the options manager.""" self.hass = hass self.flow = data_entry_flow.FlowManager( hass, self._async_create_flow, self._async_finish_flow ) async def _async_create_flow(self, entry_id, *, context, data): """Create 
an options flow for a config entry. Entry_id and flow.handler is the same thing to map entry with flow. """ entry = self.hass.config_entries.async_get_entry(entry_id) if entry is None: return if entry.domain not in HANDLERS: raise data_entry_flow.UnknownHandler flow = HANDLERS[entry.domain].async_get_options_flow(entry) return flow async def _async_finish_flow(self, flow, result): """Finish an options flow and update options for configuration entry. Flow.handler and entry_id is the same thing to map flow with entry. """ entry = self.hass.config_entries.async_get_entry(flow.handler) if entry is None: return self.hass.config_entries.async_update_entry(entry, options=result["data"]) result["result"] = True return result class OptionsFlow(data_entry_flow.FlowHandler): """Base class for config option flows.""" pass @attr.s(slots=True) class SystemOptions: """Config entry system options.""" disable_new_entities = attr.ib(type=bool, default=False) def update(self, *, disable_new_entities): """Update properties.""" self.disable_new_entities = disable_new_entities def as_dict(self): """Return dictionary version of this config entrys system options.""" return {"disable_new_entities": self.disable_new_entities} class EntityRegistryDisabledHandler: """Handler to handle when entities related to config entries updating disabled_by.""" RELOAD_AFTER_UPDATE_DELAY = 30 def __init__(self, hass: HomeAssistant) -> None: """Initialize the handler.""" self.hass = hass self.registry: Optional[entity_registry.EntityRegistry] = None self.changed: Set[str] = set() self._remove_call_later: Optional[Callable[[], None]] = None @callback def async_setup(self) -> None: """Set up the disable handler.""" self.hass.bus.async_listen( entity_registry.EVENT_ENTITY_REGISTRY_UPDATED, self._handle_entry_updated ) async def _handle_entry_updated(self, event): """Handle entity registry entry update.""" if ( event.data["action"] != "update" or "disabled_by" not in event.data["changes"] ): return if 
self.registry is None: self.registry = await entity_registry.async_get_registry(self.hass) entity_entry = self.registry.async_get(event.data["entity_id"]) if ( # Stop if no entry found entity_entry is None # Stop if entry not connected to config entry or entity_entry.config_entry_id is None # Stop if the entry got disabled. In that case the entity handles it # themselves. or entity_entry.disabled_by ): return config_entry = self.hass.config_entries.async_get_entry( entity_entry.config_entry_id ) if config_entry.entry_id not in self.changed and await support_entry_unload( self.hass, config_entry.domain ): self.changed.add(config_entry.entry_id) if not self.changed: return # We are going to delay reloading on *every* entity registry change so that # if a user is happily clicking along, it will only reload at the end. if self._remove_call_later: self._remove_call_later() self._remove_call_later = self.hass.helpers.event.async_call_later( self.RELOAD_AFTER_UPDATE_DELAY, self._handle_reload ) async def _handle_reload(self, _now): """Handle a reload.""" self._remove_call_later = None to_reload = self.changed self.changed = set() _LOGGER.info( "Reloading config entries because disabled_by changed in entity registry: %s", ", ".join(self.changed), ) await asyncio.gather( *[self.hass.config_entries.async_reload(entry_id) for entry_id in to_reload] ) async def support_entry_unload(hass: HomeAssistant, domain: str) -> bool: """Test if a domain supports entry unloading.""" integration = await loader.async_get_integration(hass, domain) component = integration.get_component() return hasattr(component, "async_unload_entry")
"""Define tests for the GeoNet NZ Quakes config flow.""" from datetime import timedelta import pytest from asynctest import patch, CoroutineMock from homeassistant import data_entry_flow from homeassistant.components.geonetnz_quakes import ( async_setup_entry, config_flow, CONF_MMI, CONF_MINIMUM_MAGNITUDE, DOMAIN, async_unload_entry, FEED, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_SCAN_INTERVAL, ) from tests.common import MockConfigEntry @pytest.fixture def config_entry(): """Create a mock GeoNet NZ Quakes config entry.""" return MockConfigEntry( domain=DOMAIN, data={ CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MMI: 4, CONF_MINIMUM_MAGNITUDE: 0.0, }, title="-41.2, 174.7", ) async def test_duplicate_error(hass, config_entry): """Test that errors are shown when duplicates are added.""" conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "identifier_exists"} async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_UNIT_SYSTEM: "metric", CONF_MMI: 2, CONF_SCAN_INTERVAL: timedelta(minutes=4), CONF_MINIMUM_MAGNITUDE: 2.5, } flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert 
result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 2, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 240.0, CONF_MINIMUM_MAGNITUDE: 2.5, } async def test_step_user(hass): """Test that the user step works.""" hass.config.latitude = -41.2 hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25, CONF_MMI: 4} flow = config_flow.GeonetnzQuakesFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25, CONF_MMI: 4, CONF_UNIT_SYSTEM: "metric", CONF_SCAN_INTERVAL: 300.0, CONF_MINIMUM_MAGNITUDE: 0.0, } async def test_component_unload_config_entry(hass, config_entry): """Test that loading and unloading of a config entry works.""" config_entry.add_to_hass(hass) with patch( "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update", new_callable=CoroutineMock, ) as mock_feed_manager_update: # Load config entry. assert await async_setup_entry(hass, config_entry) await hass.async_block_till_done() assert mock_feed_manager_update.call_count == 1 assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None # Unload config entry. assert await async_unload_entry(hass, config_entry) await hass.async_block_till_done() assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
Cinntax/home-assistant
tests/components/geonetnz_quakes/test_config_flow.py
homeassistant/config_entries.py
""" Find a few eigenvectors and eigenvalues of a matrix. Uses ARPACK: http://www.caam.rice.edu/software/ARPACK/ """ # Wrapper implementation notes # # ARPACK Entry Points # ------------------- # The entry points to ARPACK are # - (s,d)seupd : single and double precision symmetric matrix # - (s,d,c,z)neupd: single,double,complex,double complex general matrix # This wrapper puts the *neupd (general matrix) interfaces in eigs() # and the *seupd (symmetric matrix) in eigsh(). # There is no specialized interface for complex Hermitian matrices. # To find eigenvalues of a complex Hermitian matrix you # may use eigsh(), but eigsh() will simply call eigs() # and return the real part of the eigenvalues thus obtained. # Number of eigenvalues returned and complex eigenvalues # ------------------------------------------------------ # The ARPACK nonsymmetric real and double interface (s,d)naupd return # eigenvalues and eigenvectors in real (float,double) arrays. # Since the eigenvalues and eigenvectors are, in general, complex # ARPACK puts the real and imaginary parts in consecutive entries # in real-valued arrays. This wrapper puts the real entries # into complex data types and attempts to return the requested eigenvalues # and eigenvectors. # Solver modes # ------------ # ARPACK and handle shifted and shift-inverse computations # for eigenvalues by providing a shift (sigma) and a solver. __docformat__ = "restructuredtext en" __all__ = ['eigs', 'eigsh', 'svds', 'ArpackError', 'ArpackNoConvergence'] from . 
import _arpack arpack_int = _arpack.timing.nbx.dtype import numpy as np import warnings from scipy.sparse.linalg.interface import aslinearoperator, LinearOperator from scipy.sparse import eye, issparse, isspmatrix, isspmatrix_csr from scipy.linalg import eig, eigh, lu_factor, lu_solve from scipy.sparse.sputils import isdense, is_pydata_spmatrix from scipy.sparse.linalg import gmres, splu from scipy.sparse.linalg.eigen.lobpcg import lobpcg from scipy._lib._util import _aligned_zeros from scipy._lib._threadsafety import ReentrancyLock _type_conv = {'f': 's', 'd': 'd', 'F': 'c', 'D': 'z'} _ndigits = {'f': 5, 'd': 12, 'F': 5, 'D': 12} DNAUPD_ERRORS = { 0: "Normal exit.", 1: "Maximum number of iterations taken. " "All possible eigenvalues of OP has been found. IPARAM(5) " "returns the number of wanted converged Ritz values.", 2: "No longer an informational error. Deprecated starting " "with release 2 of ARPACK.", 3: "No shifts could be applied during a cycle of the " "Implicitly restarted Arnoldi iteration. One possibility " "is to increase the size of NCV relative to NEV. ", -1: "N must be positive.", -2: "NEV must be positive.", -3: "NCV-NEV >= 2 and less than or equal to N.", -4: "The maximum number of Arnoldi update iterations allowed " "must be greater than zero.", -5: " WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'", -6: "BMAT must be one of 'I' or 'G'.", -7: "Length of private work array WORKL is not sufficient.", -8: "Error return from LAPACK eigenvalue calculation;", -9: "Starting vector is zero.", -10: "IPARAM(7) must be 1,2,3,4.", -11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.", -12: "IPARAM(1) must be equal to 0 or 1.", -13: "NEV and WHICH = 'BE' are incompatible.", -9999: "Could not build an Arnoldi factorization. " "IPARAM(5) returns the size of the current Arnoldi " "factorization. The user is advised to check that " "enough workspace and array storage has been allocated." 
} SNAUPD_ERRORS = DNAUPD_ERRORS ZNAUPD_ERRORS = DNAUPD_ERRORS.copy() ZNAUPD_ERRORS[-10] = "IPARAM(7) must be 1,2,3." CNAUPD_ERRORS = ZNAUPD_ERRORS DSAUPD_ERRORS = { 0: "Normal exit.", 1: "Maximum number of iterations taken. " "All possible eigenvalues of OP has been found.", 2: "No longer an informational error. Deprecated starting with " "release 2 of ARPACK.", 3: "No shifts could be applied during a cycle of the Implicitly " "restarted Arnoldi iteration. One possibility is to increase " "the size of NCV relative to NEV. ", -1: "N must be positive.", -2: "NEV must be positive.", -3: "NCV must be greater than NEV and less than or equal to N.", -4: "The maximum number of Arnoldi update iterations allowed " "must be greater than zero.", -5: "WHICH must be one of 'LM', 'SM', 'LA', 'SA' or 'BE'.", -6: "BMAT must be one of 'I' or 'G'.", -7: "Length of private work array WORKL is not sufficient.", -8: "Error return from trid. eigenvalue calculation; " "Informational error from LAPACK routine dsteqr .", -9: "Starting vector is zero.", -10: "IPARAM(7) must be 1,2,3,4,5.", -11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.", -12: "IPARAM(1) must be equal to 0 or 1.", -13: "NEV and WHICH = 'BE' are incompatible. ", -9999: "Could not build an Arnoldi factorization. " "IPARAM(5) returns the size of the current Arnoldi " "factorization. The user is advised to check that " "enough workspace and array storage has been allocated.", } SSAUPD_ERRORS = DSAUPD_ERRORS DNEUPD_ERRORS = { 0: "Normal exit.", 1: "The Schur form computed by LAPACK routine dlahqr " "could not be reordered by LAPACK routine dtrsen. " "Re-enter subroutine dneupd with IPARAM(5)NCV and " "increase the size of the arrays DR and DI to have " "dimension at least dimension NCV and allocate at least NCV " "columns for Z. NOTE: Not necessary if Z and V share " "the same space. 
Please notify the authors if this error" "occurs.", -1: "N must be positive.", -2: "NEV must be positive.", -3: "NCV-NEV >= 2 and less than or equal to N.", -5: "WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'", -6: "BMAT must be one of 'I' or 'G'.", -7: "Length of private work WORKL array is not sufficient.", -8: "Error return from calculation of a real Schur form. " "Informational error from LAPACK routine dlahqr .", -9: "Error return from calculation of eigenvectors. " "Informational error from LAPACK routine dtrevc.", -10: "IPARAM(7) must be 1,2,3,4.", -11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.", -12: "HOWMNY = 'S' not yet implemented", -13: "HOWMNY must be one of 'A' or 'P' if RVEC = .true.", -14: "DNAUPD did not find any eigenvalues to sufficient " "accuracy.", -15: "DNEUPD got a different count of the number of converged " "Ritz values than DNAUPD got. This indicates the user " "probably made an error in passing data from DNAUPD to " "DNEUPD or that the data was modified before entering " "DNEUPD", } SNEUPD_ERRORS = DNEUPD_ERRORS.copy() SNEUPD_ERRORS[1] = ("The Schur form computed by LAPACK routine slahqr " "could not be reordered by LAPACK routine strsen . " "Re-enter subroutine dneupd with IPARAM(5)=NCV and " "increase the size of the arrays DR and DI to have " "dimension at least dimension NCV and allocate at least " "NCV columns for Z. NOTE: Not necessary if Z and V share " "the same space. Please notify the authors if this error " "occurs.") SNEUPD_ERRORS[-14] = ("SNAUPD did not find any eigenvalues to sufficient " "accuracy.") SNEUPD_ERRORS[-15] = ("SNEUPD got a different count of the number of " "converged Ritz values than SNAUPD got. This indicates " "the user probably made an error in passing data from " "SNAUPD to SNEUPD or that the data was modified before " "entering SNEUPD") ZNEUPD_ERRORS = {0: "Normal exit.", 1: "The Schur form computed by LAPACK routine csheqr " "could not be reordered by LAPACK routine ztrsen. 
" "Re-enter subroutine zneupd with IPARAM(5)=NCV and " "increase the size of the array D to have " "dimension at least dimension NCV and allocate at least " "NCV columns for Z. NOTE: Not necessary if Z and V share " "the same space. Please notify the authors if this error " "occurs.", -1: "N must be positive.", -2: "NEV must be positive.", -3: "NCV-NEV >= 1 and less than or equal to N.", -5: "WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'", -6: "BMAT must be one of 'I' or 'G'.", -7: "Length of private work WORKL array is not sufficient.", -8: "Error return from LAPACK eigenvalue calculation. " "This should never happened.", -9: "Error return from calculation of eigenvectors. " "Informational error from LAPACK routine ztrevc.", -10: "IPARAM(7) must be 1,2,3", -11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.", -12: "HOWMNY = 'S' not yet implemented", -13: "HOWMNY must be one of 'A' or 'P' if RVEC = .true.", -14: "ZNAUPD did not find any eigenvalues to sufficient " "accuracy.", -15: "ZNEUPD got a different count of the number of " "converged Ritz values than ZNAUPD got. This " "indicates the user probably made an error in passing " "data from ZNAUPD to ZNEUPD or that the data was " "modified before entering ZNEUPD" } CNEUPD_ERRORS = ZNEUPD_ERRORS.copy() CNEUPD_ERRORS[-14] = ("CNAUPD did not find any eigenvalues to sufficient " "accuracy.") CNEUPD_ERRORS[-15] = ("CNEUPD got a different count of the number of " "converged Ritz values than CNAUPD got. This indicates " "the user probably made an error in passing data from " "CNAUPD to CNEUPD or that the data was modified before " "entering CNEUPD") DSEUPD_ERRORS = { 0: "Normal exit.", -1: "N must be positive.", -2: "NEV must be positive.", -3: "NCV must be greater than NEV and less than or equal to N.", -5: "WHICH must be one of 'LM', 'SM', 'LA', 'SA' or 'BE'.", -6: "BMAT must be one of 'I' or 'G'.", -7: "Length of private work WORKL array is not sufficient.", -8: ("Error return from trid. 
eigenvalue calculation; " "Information error from LAPACK routine dsteqr."), -9: "Starting vector is zero.", -10: "IPARAM(7) must be 1,2,3,4,5.", -11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.", -12: "NEV and WHICH = 'BE' are incompatible.", -14: "DSAUPD did not find any eigenvalues to sufficient accuracy.", -15: "HOWMNY must be one of 'A' or 'S' if RVEC = .true.", -16: "HOWMNY = 'S' not yet implemented", -17: ("DSEUPD got a different count of the number of converged " "Ritz values than DSAUPD got. This indicates the user " "probably made an error in passing data from DSAUPD to " "DSEUPD or that the data was modified before entering " "DSEUPD.") } SSEUPD_ERRORS = DSEUPD_ERRORS.copy() SSEUPD_ERRORS[-14] = ("SSAUPD did not find any eigenvalues " "to sufficient accuracy.") SSEUPD_ERRORS[-17] = ("SSEUPD got a different count of the number of " "converged " "Ritz values than SSAUPD got. This indicates the user " "probably made an error in passing data from SSAUPD to " "SSEUPD or that the data was modified before entering " "SSEUPD.") _SAUPD_ERRORS = {'d': DSAUPD_ERRORS, 's': SSAUPD_ERRORS} _NAUPD_ERRORS = {'d': DNAUPD_ERRORS, 's': SNAUPD_ERRORS, 'z': ZNAUPD_ERRORS, 'c': CNAUPD_ERRORS} _SEUPD_ERRORS = {'d': DSEUPD_ERRORS, 's': SSEUPD_ERRORS} _NEUPD_ERRORS = {'d': DNEUPD_ERRORS, 's': SNEUPD_ERRORS, 'z': ZNEUPD_ERRORS, 'c': CNEUPD_ERRORS} # accepted values of parameter WHICH in _SEUPD _SEUPD_WHICH = ['LM', 'SM', 'LA', 'SA', 'BE'] # accepted values of parameter WHICH in _NAUPD _NEUPD_WHICH = ['LM', 'SM', 'LR', 'SR', 'LI', 'SI'] class ArpackError(RuntimeError): """ ARPACK error """ def __init__(self, info, infodict=_NAUPD_ERRORS): msg = infodict.get(info, "Unknown error") RuntimeError.__init__(self, "ARPACK error %d: %s" % (info, msg)) class ArpackNoConvergence(ArpackError): """ ARPACK iteration did not converge Attributes ---------- eigenvalues : ndarray Partial result. Converged eigenvalues. eigenvectors : ndarray Partial result. Converged eigenvectors. 
""" def __init__(self, msg, eigenvalues, eigenvectors): ArpackError.__init__(self, -1, {-1: msg}) self.eigenvalues = eigenvalues self.eigenvectors = eigenvectors def choose_ncv(k): """ Choose number of lanczos vectors based on target number of singular/eigen values and vectors to compute, k. """ return max(2 * k + 1, 20) class _ArpackParams(object): def __init__(self, n, k, tp, mode=1, sigma=None, ncv=None, v0=None, maxiter=None, which="LM", tol=0): if k <= 0: raise ValueError("k must be positive, k=%d" % k) if maxiter is None: maxiter = n * 10 if maxiter <= 0: raise ValueError("maxiter must be positive, maxiter=%d" % maxiter) if tp not in 'fdFD': raise ValueError("matrix type must be 'f', 'd', 'F', or 'D'") if v0 is not None: # ARPACK overwrites its initial resid, make a copy self.resid = np.array(v0, copy=True) info = 1 else: # ARPACK will use a random initial vector. self.resid = np.zeros(n, tp) info = 0 if sigma is None: #sigma not used self.sigma = 0 else: self.sigma = sigma if ncv is None: ncv = choose_ncv(k) ncv = min(ncv, n) self.v = np.zeros((n, ncv), tp) # holds Ritz vectors self.iparam = np.zeros(11, arpack_int) # set solver mode and parameters ishfts = 1 self.mode = mode self.iparam[0] = ishfts self.iparam[2] = maxiter self.iparam[3] = 1 self.iparam[6] = mode self.n = n self.tol = tol self.k = k self.maxiter = maxiter self.ncv = ncv self.which = which self.tp = tp self.info = info self.converged = False self.ido = 0 def _raise_no_convergence(self): msg = "No convergence (%d iterations, %d/%d eigenvectors converged)" k_ok = self.iparam[4] num_iter = self.iparam[2] try: ev, vec = self.extract(True) except ArpackError as err: msg = "%s [%s]" % (msg, err) ev = np.zeros((0,)) vec = np.zeros((self.n, 0)) k_ok = 0 raise ArpackNoConvergence(msg % (num_iter, k_ok, self.k), ev, vec) class _SymmetricArpackParams(_ArpackParams): def __init__(self, n, k, tp, matvec, mode=1, M_matvec=None, Minv_matvec=None, sigma=None, ncv=None, v0=None, maxiter=None, which="LM", 
tol=0): # The following modes are supported: # mode = 1: # Solve the standard eigenvalue problem: # A*x = lambda*x : # A - symmetric # Arguments should be # matvec = left multiplication by A # M_matvec = None [not used] # Minv_matvec = None [not used] # # mode = 2: # Solve the general eigenvalue problem: # A*x = lambda*M*x # A - symmetric # M - symmetric positive definite # Arguments should be # matvec = left multiplication by A # M_matvec = left multiplication by M # Minv_matvec = left multiplication by M^-1 # # mode = 3: # Solve the general eigenvalue problem in shift-invert mode: # A*x = lambda*M*x # A - symmetric # M - symmetric positive semi-definite # Arguments should be # matvec = None [not used] # M_matvec = left multiplication by M # or None, if M is the identity # Minv_matvec = left multiplication by [A-sigma*M]^-1 # # mode = 4: # Solve the general eigenvalue problem in Buckling mode: # A*x = lambda*AG*x # A - symmetric positive semi-definite # AG - symmetric indefinite # Arguments should be # matvec = left multiplication by A # M_matvec = None [not used] # Minv_matvec = left multiplication by [A-sigma*AG]^-1 # # mode = 5: # Solve the general eigenvalue problem in Cayley-transformed mode: # A*x = lambda*M*x # A - symmetric # M - symmetric positive semi-definite # Arguments should be # matvec = left multiplication by A # M_matvec = left multiplication by M # or None, if M is the identity # Minv_matvec = left multiplication by [A-sigma*M]^-1 if mode == 1: if matvec is None: raise ValueError("matvec must be specified for mode=1") if M_matvec is not None: raise ValueError("M_matvec cannot be specified for mode=1") if Minv_matvec is not None: raise ValueError("Minv_matvec cannot be specified for mode=1") self.OP = matvec self.B = lambda x: x self.bmat = 'I' elif mode == 2: if matvec is None: raise ValueError("matvec must be specified for mode=2") if M_matvec is None: raise ValueError("M_matvec must be specified for mode=2") if Minv_matvec is None: raise 
ValueError("Minv_matvec must be specified for mode=2") self.OP = lambda x: Minv_matvec(matvec(x)) self.OPa = Minv_matvec self.OPb = matvec self.B = M_matvec self.bmat = 'G' elif mode == 3: if matvec is not None: raise ValueError("matvec must not be specified for mode=3") if Minv_matvec is None: raise ValueError("Minv_matvec must be specified for mode=3") if M_matvec is None: self.OP = Minv_matvec self.OPa = Minv_matvec self.B = lambda x: x self.bmat = 'I' else: self.OP = lambda x: Minv_matvec(M_matvec(x)) self.OPa = Minv_matvec self.B = M_matvec self.bmat = 'G' elif mode == 4: if matvec is None: raise ValueError("matvec must be specified for mode=4") if M_matvec is not None: raise ValueError("M_matvec must not be specified for mode=4") if Minv_matvec is None: raise ValueError("Minv_matvec must be specified for mode=4") self.OPa = Minv_matvec self.OP = lambda x: self.OPa(matvec(x)) self.B = matvec self.bmat = 'G' elif mode == 5: if matvec is None: raise ValueError("matvec must be specified for mode=5") if Minv_matvec is None: raise ValueError("Minv_matvec must be specified for mode=5") self.OPa = Minv_matvec self.A_matvec = matvec if M_matvec is None: self.OP = lambda x: Minv_matvec(matvec(x) + sigma * x) self.B = lambda x: x self.bmat = 'I' else: self.OP = lambda x: Minv_matvec(matvec(x) + sigma * M_matvec(x)) self.B = M_matvec self.bmat = 'G' else: raise ValueError("mode=%i not implemented" % mode) if which not in _SEUPD_WHICH: raise ValueError("which must be one of %s" % ' '.join(_SEUPD_WHICH)) if k >= n: raise ValueError("k must be less than ndim(A), k=%d" % k) _ArpackParams.__init__(self, n, k, tp, mode, sigma, ncv, v0, maxiter, which, tol) if self.ncv > n or self.ncv <= k: raise ValueError("ncv must be k<ncv<=n, ncv=%s" % self.ncv) # Use _aligned_zeros to work around a f2py bug in Numpy 1.9.1 self.workd = _aligned_zeros(3 * n, self.tp) self.workl = _aligned_zeros(self.ncv * (self.ncv + 8), self.tp) ltr = _type_conv[self.tp] if ltr not in ["s", "d"]: raise 
ValueError("Input matrix is not real-valued.") self._arpack_solver = _arpack.__dict__[ltr + 'saupd'] self._arpack_extract = _arpack.__dict__[ltr + 'seupd'] self.iterate_infodict = _SAUPD_ERRORS[ltr] self.extract_infodict = _SEUPD_ERRORS[ltr] self.ipntr = np.zeros(11, arpack_int) def iterate(self): self.ido, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.info = \ self._arpack_solver(self.ido, self.bmat, self.which, self.k, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.workd, self.workl, self.info) xslice = slice(self.ipntr[0] - 1, self.ipntr[0] - 1 + self.n) yslice = slice(self.ipntr[1] - 1, self.ipntr[1] - 1 + self.n) if self.ido == -1: # initialization self.workd[yslice] = self.OP(self.workd[xslice]) elif self.ido == 1: # compute y = Op*x if self.mode == 1: self.workd[yslice] = self.OP(self.workd[xslice]) elif self.mode == 2: self.workd[xslice] = self.OPb(self.workd[xslice]) self.workd[yslice] = self.OPa(self.workd[xslice]) elif self.mode == 5: Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n) Ax = self.A_matvec(self.workd[xslice]) self.workd[yslice] = self.OPa(Ax + (self.sigma * self.workd[Bxslice])) else: Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n) self.workd[yslice] = self.OPa(self.workd[Bxslice]) elif self.ido == 2: self.workd[yslice] = self.B(self.workd[xslice]) elif self.ido == 3: raise ValueError("ARPACK requested user shifts. 
Assure ISHIFT==0") else: self.converged = True if self.info == 0: pass elif self.info == 1: self._raise_no_convergence() else: raise ArpackError(self.info, infodict=self.iterate_infodict) def extract(self, return_eigenvectors): rvec = return_eigenvectors ierr = 0 howmny = 'A' # return all eigenvectors sselect = np.zeros(self.ncv, 'int') # unused d, z, ierr = self._arpack_extract(rvec, howmny, sselect, self.sigma, self.bmat, self.which, self.k, self.tol, self.resid, self.v, self.iparam[0:7], self.ipntr, self.workd[0:2 * self.n], self.workl, ierr) if ierr != 0: raise ArpackError(ierr, infodict=self.extract_infodict) k_ok = self.iparam[4] d = d[:k_ok] z = z[:, :k_ok] if return_eigenvectors: return d, z else: return d class _UnsymmetricArpackParams(_ArpackParams): def __init__(self, n, k, tp, matvec, mode=1, M_matvec=None, Minv_matvec=None, sigma=None, ncv=None, v0=None, maxiter=None, which="LM", tol=0): # The following modes are supported: # mode = 1: # Solve the standard eigenvalue problem: # A*x = lambda*x # A - square matrix # Arguments should be # matvec = left multiplication by A # M_matvec = None [not used] # Minv_matvec = None [not used] # # mode = 2: # Solve the generalized eigenvalue problem: # A*x = lambda*M*x # A - square matrix # M - symmetric, positive semi-definite # Arguments should be # matvec = left multiplication by A # M_matvec = left multiplication by M # Minv_matvec = left multiplication by M^-1 # # mode = 3,4: # Solve the general eigenvalue problem in shift-invert mode: # A*x = lambda*M*x # A - square matrix # M - symmetric, positive semi-definite # Arguments should be # matvec = None [not used] # M_matvec = left multiplication by M # or None, if M is the identity # Minv_matvec = left multiplication by [A-sigma*M]^-1 # if A is real and mode==3, use the real part of Minv_matvec # if A is real and mode==4, use the imag part of Minv_matvec # if A is complex and mode==3, # use real and imag parts of Minv_matvec if mode == 1: if matvec is None: raise 
ValueError("matvec must be specified for mode=1") if M_matvec is not None: raise ValueError("M_matvec cannot be specified for mode=1") if Minv_matvec is not None: raise ValueError("Minv_matvec cannot be specified for mode=1") self.OP = matvec self.B = lambda x: x self.bmat = 'I' elif mode == 2: if matvec is None: raise ValueError("matvec must be specified for mode=2") if M_matvec is None: raise ValueError("M_matvec must be specified for mode=2") if Minv_matvec is None: raise ValueError("Minv_matvec must be specified for mode=2") self.OP = lambda x: Minv_matvec(matvec(x)) self.OPa = Minv_matvec self.OPb = matvec self.B = M_matvec self.bmat = 'G' elif mode in (3, 4): if matvec is None: raise ValueError("matvec must be specified " "for mode in (3,4)") if Minv_matvec is None: raise ValueError("Minv_matvec must be specified " "for mode in (3,4)") self.matvec = matvec if tp in 'DF': # complex type if mode == 3: self.OPa = Minv_matvec else: raise ValueError("mode=4 invalid for complex A") else: # real type if mode == 3: self.OPa = lambda x: np.real(Minv_matvec(x)) else: self.OPa = lambda x: np.imag(Minv_matvec(x)) if M_matvec is None: self.B = lambda x: x self.bmat = 'I' self.OP = self.OPa else: self.B = M_matvec self.bmat = 'G' self.OP = lambda x: self.OPa(M_matvec(x)) else: raise ValueError("mode=%i not implemented" % mode) if which not in _NEUPD_WHICH: raise ValueError("Parameter which must be one of %s" % ' '.join(_NEUPD_WHICH)) if k >= n - 1: raise ValueError("k must be less than ndim(A)-1, k=%d" % k) _ArpackParams.__init__(self, n, k, tp, mode, sigma, ncv, v0, maxiter, which, tol) if self.ncv > n or self.ncv <= k + 1: raise ValueError("ncv must be k+1<ncv<=n, ncv=%s" % self.ncv) # Use _aligned_zeros to work around a f2py bug in Numpy 1.9.1 self.workd = _aligned_zeros(3 * n, self.tp) self.workl = _aligned_zeros(3 * self.ncv * (self.ncv + 2), self.tp) ltr = _type_conv[self.tp] self._arpack_solver = _arpack.__dict__[ltr + 'naupd'] self._arpack_extract = 
_arpack.__dict__[ltr + 'neupd'] self.iterate_infodict = _NAUPD_ERRORS[ltr] self.extract_infodict = _NEUPD_ERRORS[ltr] self.ipntr = np.zeros(14, arpack_int) if self.tp in 'FD': # Use _aligned_zeros to work around a f2py bug in Numpy 1.9.1 self.rwork = _aligned_zeros(self.ncv, self.tp.lower()) else: self.rwork = None def iterate(self): if self.tp in 'fd': self.ido, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.info =\ self._arpack_solver(self.ido, self.bmat, self.which, self.k, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.workd, self.workl, self.info) else: self.ido, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.info =\ self._arpack_solver(self.ido, self.bmat, self.which, self.k, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.workd, self.workl, self.rwork, self.info) xslice = slice(self.ipntr[0] - 1, self.ipntr[0] - 1 + self.n) yslice = slice(self.ipntr[1] - 1, self.ipntr[1] - 1 + self.n) if self.ido == -1: # initialization self.workd[yslice] = self.OP(self.workd[xslice]) elif self.ido == 1: # compute y = Op*x if self.mode in (1, 2): self.workd[yslice] = self.OP(self.workd[xslice]) else: Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n) self.workd[yslice] = self.OPa(self.workd[Bxslice]) elif self.ido == 2: self.workd[yslice] = self.B(self.workd[xslice]) elif self.ido == 3: raise ValueError("ARPACK requested user shifts. 
Assure ISHIFT==0") else: self.converged = True if self.info == 0: pass elif self.info == 1: self._raise_no_convergence() else: raise ArpackError(self.info, infodict=self.iterate_infodict) def extract(self, return_eigenvectors): k, n = self.k, self.n ierr = 0 howmny = 'A' # return all eigenvectors sselect = np.zeros(self.ncv, 'int') # unused sigmar = np.real(self.sigma) sigmai = np.imag(self.sigma) workev = np.zeros(3 * self.ncv, self.tp) if self.tp in 'fd': dr = np.zeros(k + 1, self.tp) di = np.zeros(k + 1, self.tp) zr = np.zeros((n, k + 1), self.tp) dr, di, zr, ierr = \ self._arpack_extract(return_eigenvectors, howmny, sselect, sigmar, sigmai, workev, self.bmat, self.which, k, self.tol, self.resid, self.v, self.iparam, self.ipntr, self.workd, self.workl, self.info) if ierr != 0: raise ArpackError(ierr, infodict=self.extract_infodict) nreturned = self.iparam[4] # number of good eigenvalues returned # Build complex eigenvalues from real and imaginary parts d = dr + 1.0j * di # Arrange the eigenvectors: complex eigenvectors are stored as # real,imaginary in consecutive columns z = zr.astype(self.tp.upper()) # The ARPACK nonsymmetric real and double interface (s,d)naupd # return eigenvalues and eigenvectors in real (float,double) # arrays. # Efficiency: this should check that return_eigenvectors == True # before going through this construction. if sigmai == 0: i = 0 while i <= k: # check if complex if abs(d[i].imag) != 0: # this is a complex conjugate pair with eigenvalues # in consecutive columns if i < k: z[:, i] = zr[:, i] + 1.0j * zr[:, i + 1] z[:, i + 1] = z[:, i].conjugate() i += 1 else: #last eigenvalue is complex: the imaginary part of # the eigenvector has not been returned #this can only happen if nreturned > k, so we'll # throw out this case. 
nreturned -= 1 i += 1 else: # real matrix, mode 3 or 4, imag(sigma) is nonzero: # see remark 3 in <s,d>neupd.f # Build complex eigenvalues from real and imaginary parts i = 0 while i <= k: if abs(d[i].imag) == 0: d[i] = np.dot(zr[:, i], self.matvec(zr[:, i])) else: if i < k: z[:, i] = zr[:, i] + 1.0j * zr[:, i + 1] z[:, i + 1] = z[:, i].conjugate() d[i] = ((np.dot(zr[:, i], self.matvec(zr[:, i])) + np.dot(zr[:, i + 1], self.matvec(zr[:, i + 1]))) + 1j * (np.dot(zr[:, i], self.matvec(zr[:, i + 1])) - np.dot(zr[:, i + 1], self.matvec(zr[:, i])))) d[i + 1] = d[i].conj() i += 1 else: #last eigenvalue is complex: the imaginary part of # the eigenvector has not been returned #this can only happen if nreturned > k, so we'll # throw out this case. nreturned -= 1 i += 1 # Now we have k+1 possible eigenvalues and eigenvectors # Return the ones specified by the keyword "which" if nreturned <= k: # we got less or equal as many eigenvalues we wanted d = d[:nreturned] z = z[:, :nreturned] else: # we got one extra eigenvalue (likely a cc pair, but which?) if self.mode in (1, 2): rd = d elif self.mode in (3, 4): rd = 1 / (d - self.sigma) if self.which in ['LR', 'SR']: ind = np.argsort(rd.real) elif self.which in ['LI', 'SI']: # for LI,SI ARPACK returns largest,smallest # abs(imaginary) (complex pairs come together) ind = np.argsort(abs(rd.imag)) else: ind = np.argsort(abs(rd)) if self.which in ['LR', 'LM', 'LI']: ind = ind[-k:][::-1] elif self.which in ['SR', 'SM', 'SI']: ind = ind[:k] d = d[ind] z = z[:, ind] else: # complex is so much simpler... 
            d, z, ierr =\
                self._arpack_extract(return_eigenvectors,
                       howmny, sselect, self.sigma, workev,
                       self.bmat, self.which, k, self.tol, self.resid,
                       self.v, self.iparam, self.ipntr,
                       self.workd, self.workl, self.rwork, ierr)
            if ierr != 0:
                raise ArpackError(ierr, infodict=self.extract_infodict)
            # iparam[4] holds the number of converged Ritz values; trim
            # the outputs down to just those.
            k_ok = self.iparam[4]
            d = d[:k_ok]
            z = z[:, :k_ok]

        if return_eigenvectors:
            return d, z
        else:
            return d


def _aslinearoperator_with_dtype(m):
    """Wrap *m* as a LinearOperator, guaranteeing it has a dtype."""
    m = aslinearoperator(m)
    if not hasattr(m, 'dtype'):
        # Infer the dtype by applying the operator to a zero vector.
        x = np.zeros(m.shape[1])
        m.dtype = (m * x).dtype
    return m


class SpLuInv(LinearOperator):
    """
    SpLuInv:
       helper class to repeatedly solve M*x=b
       using a sparse LU-decomposition of M
    """

    def __init__(self, M):
        # Factor once; _matvec then only performs triangular solves.
        self.M_lu = splu(M)
        self.shape = M.shape
        self.dtype = M.dtype
        self.isreal = not np.issubdtype(self.dtype, np.complexfloating)

    def _matvec(self, x):
        # careful here: splu.solve will throw away imaginary
        # part of x if M is real
        x = np.asarray(x)
        if self.isreal and np.issubdtype(x.dtype, np.complexfloating):
            # Solve real and imaginary parts separately and recombine.
            return (self.M_lu.solve(np.real(x).astype(self.dtype))
                    + 1j * self.M_lu.solve(np.imag(x).astype(self.dtype)))
        else:
            return self.M_lu.solve(x.astype(self.dtype))


class LuInv(LinearOperator):
    """
    LuInv:
       helper class to repeatedly solve M*x=b
       using an LU-decomposition of M
    """

    def __init__(self, M):
        # Dense LU factorization, computed once up front.
        self.M_lu = lu_factor(M)
        self.shape = M.shape
        self.dtype = M.dtype

    def _matvec(self, x):
        return lu_solve(self.M_lu, x)


def gmres_loose(A, b, tol):
    """
    gmres with looser termination condition.
    """
    # Never ask gmres for more accuracy than roughly what float
    # round-off in a vector of this size can support.
    b = np.asarray(b)
    min_tol = 1000 * np.sqrt(b.size) * np.finfo(b.dtype).eps
    return gmres(A, b, tol=max(tol, min_tol), atol=0)


class IterInv(LinearOperator):
    """
    IterInv:
       helper class to repeatedly solve M*x=b
       using an iterative method.
""" def __init__(self, M, ifunc=gmres_loose, tol=0): self.M = M if hasattr(M, 'dtype'): self.dtype = M.dtype else: x = np.zeros(M.shape[1]) self.dtype = (M * x).dtype self.shape = M.shape if tol <= 0: # when tol=0, ARPACK uses machine tolerance as calculated # by LAPACK's _LAMCH function. We should match this tol = 2 * np.finfo(self.dtype).eps self.ifunc = ifunc self.tol = tol def _matvec(self, x): b, info = self.ifunc(self.M, x, tol=self.tol) if info != 0: raise ValueError("Error in inverting M: function " "%s did not converge (info = %i)." % (self.ifunc.__name__, info)) return b class IterOpInv(LinearOperator): """ IterOpInv: helper class to repeatedly solve [A-sigma*M]*x = b using an iterative method """ def __init__(self, A, M, sigma, ifunc=gmres_loose, tol=0): self.A = A self.M = M self.sigma = sigma def mult_func(x): return A.matvec(x) - sigma * M.matvec(x) def mult_func_M_None(x): return A.matvec(x) - sigma * x x = np.zeros(A.shape[1]) if M is None: dtype = mult_func_M_None(x).dtype self.OP = LinearOperator(self.A.shape, mult_func_M_None, dtype=dtype) else: dtype = mult_func(x).dtype self.OP = LinearOperator(self.A.shape, mult_func, dtype=dtype) self.shape = A.shape if tol <= 0: # when tol=0, ARPACK uses machine tolerance as calculated # by LAPACK's _LAMCH function. We should match this tol = 2 * np.finfo(self.OP.dtype).eps self.ifunc = ifunc self.tol = tol def _matvec(self, x): b, info = self.ifunc(self.OP, x, tol=self.tol) if info != 0: raise ValueError("Error in inverting [A-sigma*M]: function " "%s did not converge (info = %i)." 
% (self.ifunc.__name__, info)) return b @property def dtype(self): return self.OP.dtype def _fast_spmatrix_to_csc(A, hermitian=False): """Convert sparse matrix to CSC (by transposing, if possible)""" if (isspmatrix_csr(A) and hermitian and not np.issubdtype(A.dtype, np.complexfloating)): return A.T elif is_pydata_spmatrix(A): # No need to convert return A else: return A.tocsc() def get_inv_matvec(M, hermitian=False, tol=0): if isdense(M): return LuInv(M).matvec elif isspmatrix(M) or is_pydata_spmatrix(M): M = _fast_spmatrix_to_csc(M, hermitian=hermitian) return SpLuInv(M).matvec else: return IterInv(M, tol=tol).matvec def get_OPinv_matvec(A, M, sigma, hermitian=False, tol=0): if sigma == 0: return get_inv_matvec(A, hermitian=hermitian, tol=tol) if M is None: #M is the identity matrix if isdense(A): if (np.issubdtype(A.dtype, np.complexfloating) or np.imag(sigma) == 0): A = np.copy(A) else: A = A + 0j A.flat[::A.shape[1] + 1] -= sigma return LuInv(A).matvec elif isspmatrix(A) or is_pydata_spmatrix(A): A = A - sigma * eye(A.shape[0]) A = _fast_spmatrix_to_csc(A, hermitian=hermitian) return SpLuInv(A).matvec else: return IterOpInv(_aslinearoperator_with_dtype(A), M, sigma, tol=tol).matvec else: if ((not isdense(A) and not isspmatrix(A) and not is_pydata_spmatrix(A)) or (not isdense(M) and not isspmatrix(M) and not is_pydata_spmatrix(A))): return IterOpInv(_aslinearoperator_with_dtype(A), _aslinearoperator_with_dtype(M), sigma, tol=tol).matvec elif isdense(A) or isdense(M): return LuInv(A - sigma * M).matvec else: OP = A - sigma * M OP = _fast_spmatrix_to_csc(OP, hermitian=hermitian) return SpLuInv(OP).matvec # ARPACK is not threadsafe or reentrant (SAVE variables), so we need a # lock and a re-entering check. 
_ARPACK_LOCK = ReentrancyLock("Nested calls to eigs/eighs not allowed: " "ARPACK is not re-entrant") def eigs(A, k=6, M=None, sigma=None, which='LM', v0=None, ncv=None, maxiter=None, tol=0, return_eigenvectors=True, Minv=None, OPinv=None, OPpart=None): """ Find k eigenvalues and eigenvectors of the square matrix A. Solves ``A * x[i] = w[i] * x[i]``, the standard eigenvalue problem for w[i] eigenvalues with corresponding eigenvectors x[i]. If M is specified, solves ``A * x[i] = w[i] * M * x[i]``, the generalized eigenvalue problem for w[i] eigenvalues with corresponding eigenvectors x[i] Parameters ---------- A : ndarray, sparse matrix or LinearOperator An array, sparse matrix, or LinearOperator representing the operation ``A * x``, where A is a real or complex square matrix. k : int, optional The number of eigenvalues and eigenvectors desired. `k` must be smaller than N-1. It is not possible to compute all eigenvectors of a matrix. M : ndarray, sparse matrix or LinearOperator, optional An array, sparse matrix, or LinearOperator representing the operation M*x for the generalized eigenvalue problem A * x = w * M * x. M must represent a real symmetric matrix if A is real, and must represent a complex Hermitian matrix if A is complex. For best results, the data type of M should be the same as that of A. Additionally: If `sigma` is None, M is positive definite If sigma is specified, M is positive semi-definite If sigma is None, eigs requires an operator to compute the solution of the linear equation ``M * x = b``. This is done internally via a (sparse) LU decomposition for an explicit matrix M, or via an iterative solver for a general linear operator. Alternatively, the user can supply the matrix or operator Minv, which gives ``x = Minv * b = M^-1 * b``. sigma : real or complex, optional Find eigenvalues near sigma using shift-invert mode. 
This requires an operator to compute the solution of the linear system ``[A - sigma * M] * x = b``, where M is the identity matrix if unspecified. This is computed internally via a (sparse) LU decomposition for explicit matrices A & M, or via an iterative solver if either A or M is a general linear operator. Alternatively, the user can supply the matrix or operator OPinv, which gives ``x = OPinv * b = [A - sigma * M]^-1 * b``. For a real matrix A, shift-invert can either be done in imaginary mode or real mode, specified by the parameter OPpart ('r' or 'i'). Note that when sigma is specified, the keyword 'which' (below) refers to the shifted eigenvalues ``w'[i]`` where: If A is real and OPpart == 'r' (default), ``w'[i] = 1/2 * [1/(w[i]-sigma) + 1/(w[i]-conj(sigma))]``. If A is real and OPpart == 'i', ``w'[i] = 1/2i * [1/(w[i]-sigma) - 1/(w[i]-conj(sigma))]``. If A is complex, ``w'[i] = 1/(w[i]-sigma)``. v0 : ndarray, optional Starting vector for iteration. Default: random ncv : int, optional The number of Lanczos vectors generated `ncv` must be greater than `k`; it is recommended that ``ncv > 2*k``. Default: ``min(n, max(2*k + 1, 20))`` which : str, ['LM' | 'SM' | 'LR' | 'SR' | 'LI' | 'SI'], optional Which `k` eigenvectors and eigenvalues to find: 'LM' : largest magnitude 'SM' : smallest magnitude 'LR' : largest real part 'SR' : smallest real part 'LI' : largest imaginary part 'SI' : smallest imaginary part When sigma != None, 'which' refers to the shifted eigenvalues w'[i] (see discussion in 'sigma', above). ARPACK is generally better at finding large values than small values. If small eigenvalues are desired, consider using shift-invert mode for better performance. maxiter : int, optional Maximum number of Arnoldi update iterations allowed Default: ``n*10`` tol : float, optional Relative accuracy for eigenvalues (stopping criterion) The default value of 0 implies machine precision. 
return_eigenvectors : bool, optional Return eigenvectors (True) in addition to eigenvalues Minv : ndarray, sparse matrix or LinearOperator, optional See notes in M, above. OPinv : ndarray, sparse matrix or LinearOperator, optional See notes in sigma, above. OPpart : {'r' or 'i'}, optional See notes in sigma, above Returns ------- w : ndarray Array of k eigenvalues. v : ndarray An array of `k` eigenvectors. ``v[:, i]`` is the eigenvector corresponding to the eigenvalue w[i]. Raises ------ ArpackNoConvergence When the requested convergence is not obtained. The currently converged eigenvalues and eigenvectors can be found as ``eigenvalues`` and ``eigenvectors`` attributes of the exception object. See Also -------- eigsh : eigenvalues and eigenvectors for symmetric matrix A svds : singular value decomposition for a matrix A Notes ----- This function is a wrapper to the ARPACK [1]_ SNEUPD, DNEUPD, CNEUPD, ZNEUPD, functions which use the Implicitly Restarted Arnoldi Method to find the eigenvalues and eigenvectors [2]_. References ---------- .. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/ .. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE: Solution of Large Scale Eigenvalue Problems by Implicitly Restarted Arnoldi Methods. SIAM, Philadelphia, PA, 1998. Examples -------- Find 6 eigenvectors of the identity matrix: >>> from scipy.sparse.linalg import eigs >>> id = np.eye(13) >>> vals, vecs = eigs(id, k=6) >>> vals array([ 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j]) >>> vecs.shape (13, 6) """ if A.shape[0] != A.shape[1]: raise ValueError('expected square matrix (shape=%s)' % (A.shape,)) if M is not None: if M.shape != A.shape: raise ValueError('wrong M dimensions %s, should be %s' % (M.shape, A.shape)) if np.dtype(M.dtype).char.lower() != np.dtype(A.dtype).char.lower(): warnings.warn('M does not have the same type precision as A. 
' 'This may adversely affect ARPACK convergence') n = A.shape[0] if k <= 0: raise ValueError("k=%d must be greater than 0." % k) if k >= n - 1: warnings.warn("k >= N - 1 for N * N square matrix. " "Attempting to use scipy.linalg.eig instead.", RuntimeWarning) if issparse(A): raise TypeError("Cannot use scipy.linalg.eig for sparse A with " "k >= N - 1. Use scipy.linalg.eig(A.toarray()) or" " reduce k.") if isinstance(A, LinearOperator): raise TypeError("Cannot use scipy.linalg.eig for LinearOperator " "A with k >= N - 1.") if isinstance(M, LinearOperator): raise TypeError("Cannot use scipy.linalg.eig for LinearOperator " "M with k >= N - 1.") return eig(A, b=M, right=return_eigenvectors) if sigma is None: matvec = _aslinearoperator_with_dtype(A).matvec if OPinv is not None: raise ValueError("OPinv should not be specified " "with sigma = None.") if OPpart is not None: raise ValueError("OPpart should not be specified with " "sigma = None or complex A") if M is None: #standard eigenvalue problem mode = 1 M_matvec = None Minv_matvec = None if Minv is not None: raise ValueError("Minv should not be " "specified with M = None.") else: #general eigenvalue problem mode = 2 if Minv is None: Minv_matvec = get_inv_matvec(M, hermitian=True, tol=tol) else: Minv = _aslinearoperator_with_dtype(Minv) Minv_matvec = Minv.matvec M_matvec = _aslinearoperator_with_dtype(M).matvec else: #sigma is not None: shift-invert mode if np.issubdtype(A.dtype, np.complexfloating): if OPpart is not None: raise ValueError("OPpart should not be specified " "with sigma=None or complex A") mode = 3 elif OPpart is None or OPpart.lower() == 'r': mode = 3 elif OPpart.lower() == 'i': if np.imag(sigma) == 0: raise ValueError("OPpart cannot be 'i' if sigma is real") mode = 4 else: raise ValueError("OPpart must be one of ('r','i')") matvec = _aslinearoperator_with_dtype(A).matvec if Minv is not None: raise ValueError("Minv should not be specified when sigma is") if OPinv is None: Minv_matvec = 
get_OPinv_matvec(A, M, sigma, hermitian=False, tol=tol) else: OPinv = _aslinearoperator_with_dtype(OPinv) Minv_matvec = OPinv.matvec if M is None: M_matvec = None else: M_matvec = _aslinearoperator_with_dtype(M).matvec params = _UnsymmetricArpackParams(n, k, A.dtype.char, matvec, mode, M_matvec, Minv_matvec, sigma, ncv, v0, maxiter, which, tol) with _ARPACK_LOCK: while not params.converged: params.iterate() return params.extract(return_eigenvectors) def eigsh(A, k=6, M=None, sigma=None, which='LM', v0=None, ncv=None, maxiter=None, tol=0, return_eigenvectors=True, Minv=None, OPinv=None, mode='normal'): """ Find k eigenvalues and eigenvectors of the real symmetric square matrix or complex Hermitian matrix A. Solves ``A * x[i] = w[i] * x[i]``, the standard eigenvalue problem for w[i] eigenvalues with corresponding eigenvectors x[i]. If M is specified, solves ``A * x[i] = w[i] * M * x[i]``, the generalized eigenvalue problem for w[i] eigenvalues with corresponding eigenvectors x[i]. Note that there is no specialized routine for the case when A is a complex Hermitian matrix. In this case, ``eigsh()`` will call ``eigs()`` and return the real parts of the eigenvalues thus obtained. Parameters ---------- A : ndarray, sparse matrix or LinearOperator A square operator representing the operation ``A * x``, where ``A`` is real symmetric or complex Hermitian. For buckling mode (see below) ``A`` must additionally be positive-definite. k : int, optional The number of eigenvalues and eigenvectors desired. `k` must be smaller than N. It is not possible to compute all eigenvectors of a matrix. Returns ------- w : array Array of k eigenvalues. v : array An array representing the `k` eigenvectors. The column ``v[:, i]`` is the eigenvector corresponding to the eigenvalue ``w[i]``. Other Parameters ---------------- M : An N x N matrix, array, sparse matrix, or linear operator representing the operation ``M @ x`` for the generalized eigenvalue problem A @ x = w * M @ x. 
M must represent a real symmetric matrix if A is real, and must represent a complex Hermitian matrix if A is complex. For best results, the data type of M should be the same as that of A. Additionally: If sigma is None, M is symmetric positive definite. If sigma is specified, M is symmetric positive semi-definite. In buckling mode, M is symmetric indefinite. If sigma is None, eigsh requires an operator to compute the solution of the linear equation ``M @ x = b``. This is done internally via a (sparse) LU decomposition for an explicit matrix M, or via an iterative solver for a general linear operator. Alternatively, the user can supply the matrix or operator Minv, which gives ``x = Minv @ b = M^-1 @ b``. sigma : real Find eigenvalues near sigma using shift-invert mode. This requires an operator to compute the solution of the linear system ``[A - sigma * M] x = b``, where M is the identity matrix if unspecified. This is computed internally via a (sparse) LU decomposition for explicit matrices A & M, or via an iterative solver if either A or M is a general linear operator. Alternatively, the user can supply the matrix or operator OPinv, which gives ``x = OPinv @ b = [A - sigma * M]^-1 @ b``. Note that when sigma is specified, the keyword 'which' refers to the shifted eigenvalues ``w'[i]`` where: if mode == 'normal', ``w'[i] = 1 / (w[i] - sigma)``. if mode == 'cayley', ``w'[i] = (w[i] + sigma) / (w[i] - sigma)``. if mode == 'buckling', ``w'[i] = w[i] / (w[i] - sigma)``. (see further discussion in 'mode' below) v0 : ndarray, optional Starting vector for iteration. Default: random ncv : int, optional The number of Lanczos vectors generated ncv must be greater than k and smaller than n; it is recommended that ``ncv > 2*k``. Default: ``min(n, max(2*k + 1, 20))`` which : str ['LM' | 'SM' | 'LA' | 'SA' | 'BE'] If A is a complex Hermitian matrix, 'BE' is invalid. Which `k` eigenvectors and eigenvalues to find: 'LM' : Largest (in magnitude) eigenvalues. 
'SM' : Smallest (in magnitude) eigenvalues. 'LA' : Largest (algebraic) eigenvalues. 'SA' : Smallest (algebraic) eigenvalues. 'BE' : Half (k/2) from each end of the spectrum. When k is odd, return one more (k/2+1) from the high end. When sigma != None, 'which' refers to the shifted eigenvalues ``w'[i]`` (see discussion in 'sigma', above). ARPACK is generally better at finding large values than small values. If small eigenvalues are desired, consider using shift-invert mode for better performance. maxiter : int, optional Maximum number of Arnoldi update iterations allowed. Default: ``n*10`` tol : float Relative accuracy for eigenvalues (stopping criterion). The default value of 0 implies machine precision. Minv : N x N matrix, array, sparse matrix, or LinearOperator See notes in M, above. OPinv : N x N matrix, array, sparse matrix, or LinearOperator See notes in sigma, above. return_eigenvectors : bool Return eigenvectors (True) in addition to eigenvalues. This value determines the order in which eigenvalues are sorted. The sort order is also dependent on the `which` variable. For which = 'LM' or 'SA': If `return_eigenvectors` is True, eigenvalues are sorted by algebraic value. If `return_eigenvectors` is False, eigenvalues are sorted by absolute value. For which = 'BE' or 'LA': eigenvalues are always sorted by algebraic value. For which = 'SM': If `return_eigenvectors` is True, eigenvalues are sorted by algebraic value. If `return_eigenvectors` is False, eigenvalues are sorted by decreasing absolute value. mode : string ['normal' | 'buckling' | 'cayley'] Specify strategy to use for shift-invert mode. This argument applies only for real-valued A and sigma != None. For shift-invert mode, ARPACK internally solves the eigenvalue problem ``OP * x'[i] = w'[i] * B * x'[i]`` and transforms the resulting Ritz vectors x'[i] and Ritz values w'[i] into the desired eigenvectors and eigenvalues of the problem ``A * x[i] = w[i] * M * x[i]``. 
The modes are as follows: 'normal' : OP = [A - sigma * M]^-1 @ M, B = M, w'[i] = 1 / (w[i] - sigma) 'buckling' : OP = [A - sigma * M]^-1 @ A, B = A, w'[i] = w[i] / (w[i] - sigma) 'cayley' : OP = [A - sigma * M]^-1 @ [A + sigma * M], B = M, w'[i] = (w[i] + sigma) / (w[i] - sigma) The choice of mode will affect which eigenvalues are selected by the keyword 'which', and can also impact the stability of convergence (see [2] for a discussion). Raises ------ ArpackNoConvergence When the requested convergence is not obtained. The currently converged eigenvalues and eigenvectors can be found as ``eigenvalues`` and ``eigenvectors`` attributes of the exception object. See Also -------- eigs : eigenvalues and eigenvectors for a general (nonsymmetric) matrix A svds : singular value decomposition for a matrix A Notes ----- This function is a wrapper to the ARPACK [1]_ SSEUPD and DSEUPD functions which use the Implicitly Restarted Lanczos Method to find the eigenvalues and eigenvectors [2]_. References ---------- .. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/ .. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE: Solution of Large Scale Eigenvalue Problems by Implicitly Restarted Arnoldi Methods. SIAM, Philadelphia, PA, 1998. 
Examples -------- >>> from scipy.sparse.linalg import eigsh >>> identity = np.eye(13) >>> eigenvalues, eigenvectors = eigsh(identity, k=6) >>> eigenvalues array([1., 1., 1., 1., 1., 1.]) >>> eigenvectors.shape (13, 6) """ # complex Hermitian matrices should be solved with eigs if np.issubdtype(A.dtype, np.complexfloating): if mode != 'normal': raise ValueError("mode=%s cannot be used with " "complex matrix A" % mode) if which == 'BE': raise ValueError("which='BE' cannot be used with complex matrix A") elif which == 'LA': which = 'LR' elif which == 'SA': which = 'SR' ret = eigs(A, k, M=M, sigma=sigma, which=which, v0=v0, ncv=ncv, maxiter=maxiter, tol=tol, return_eigenvectors=return_eigenvectors, Minv=Minv, OPinv=OPinv) if return_eigenvectors: return ret[0].real, ret[1] else: return ret.real if A.shape[0] != A.shape[1]: raise ValueError('expected square matrix (shape=%s)' % (A.shape,)) if M is not None: if M.shape != A.shape: raise ValueError('wrong M dimensions %s, should be %s' % (M.shape, A.shape)) if np.dtype(M.dtype).char.lower() != np.dtype(A.dtype).char.lower(): warnings.warn('M does not have the same type precision as A. ' 'This may adversely affect ARPACK convergence') n = A.shape[0] if k <= 0: raise ValueError("k must be greater than 0.") if k >= n: warnings.warn("k >= N for N * N square matrix. " "Attempting to use scipy.linalg.eigh instead.", RuntimeWarning) if issparse(A): raise TypeError("Cannot use scipy.linalg.eigh for sparse A with " "k >= N. 
Use scipy.linalg.eigh(A.toarray()) or" " reduce k.") if isinstance(A, LinearOperator): raise TypeError("Cannot use scipy.linalg.eigh for LinearOperator " "A with k >= N.") if isinstance(M, LinearOperator): raise TypeError("Cannot use scipy.linalg.eigh for LinearOperator " "M with k >= N.") return eigh(A, b=M, eigvals_only=not return_eigenvectors) if sigma is None: A = _aslinearoperator_with_dtype(A) matvec = A.matvec if OPinv is not None: raise ValueError("OPinv should not be specified " "with sigma = None.") if M is None: #standard eigenvalue problem mode = 1 M_matvec = None Minv_matvec = None if Minv is not None: raise ValueError("Minv should not be " "specified with M = None.") else: #general eigenvalue problem mode = 2 if Minv is None: Minv_matvec = get_inv_matvec(M, hermitian=True, tol=tol) else: Minv = _aslinearoperator_with_dtype(Minv) Minv_matvec = Minv.matvec M_matvec = _aslinearoperator_with_dtype(M).matvec else: # sigma is not None: shift-invert mode if Minv is not None: raise ValueError("Minv should not be specified when sigma is") # normal mode if mode == 'normal': mode = 3 matvec = None if OPinv is None: Minv_matvec = get_OPinv_matvec(A, M, sigma, hermitian=True, tol=tol) else: OPinv = _aslinearoperator_with_dtype(OPinv) Minv_matvec = OPinv.matvec if M is None: M_matvec = None else: M = _aslinearoperator_with_dtype(M) M_matvec = M.matvec # buckling mode elif mode == 'buckling': mode = 4 if OPinv is None: Minv_matvec = get_OPinv_matvec(A, M, sigma, hermitian=True, tol=tol) else: Minv_matvec = _aslinearoperator_with_dtype(OPinv).matvec matvec = _aslinearoperator_with_dtype(A).matvec M_matvec = None # cayley-transform mode elif mode == 'cayley': mode = 5 matvec = _aslinearoperator_with_dtype(A).matvec if OPinv is None: Minv_matvec = get_OPinv_matvec(A, M, sigma, hermitian=True, tol=tol) else: Minv_matvec = _aslinearoperator_with_dtype(OPinv).matvec if M is None: M_matvec = None else: M_matvec = _aslinearoperator_with_dtype(M).matvec # unrecognized mode 
else: raise ValueError("unrecognized mode '%s'" % mode) params = _SymmetricArpackParams(n, k, A.dtype.char, matvec, mode, M_matvec, Minv_matvec, sigma, ncv, v0, maxiter, which, tol) with _ARPACK_LOCK: while not params.converged: params.iterate() return params.extract(return_eigenvectors) def _augmented_orthonormal_cols(x, k): # extract the shape of the x array n, m = x.shape # create the expanded array and copy x into it y = np.empty((n, m+k), dtype=x.dtype) y[:, :m] = x # do some modified gram schmidt to add k random orthonormal vectors for i in range(k): # sample a random initial vector v = np.random.randn(n) if np.iscomplexobj(x): v = v + 1j*np.random.randn(n) # subtract projections onto the existing unit length vectors for j in range(m+i): u = y[:, j] v -= (np.dot(v, u.conj()) / np.dot(u, u.conj())) * u # normalize v v /= np.sqrt(np.dot(v, v.conj())) # add v into the output array y[:, m+i] = v # return the expanded array return y def _augmented_orthonormal_rows(x, k): return _augmented_orthonormal_cols(x.T, k).T def _herm(x): return x.T.conj() def svds(A, k=6, ncv=None, tol=0, which='LM', v0=None, maxiter=None, return_singular_vectors=True, solver='arpack'): """Compute the largest or smallest k singular values/vectors for a sparse matrix. The order of the singular values is not guaranteed. Parameters ---------- A : {sparse matrix, LinearOperator} Array to compute the SVD on, of shape (M, N) k : int, optional Number of singular values and vectors to compute. Must be 1 <= k < min(A.shape). ncv : int, optional The number of Lanczos vectors generated ncv must be greater than k+1 and smaller than n; it is recommended that ncv > 2*k Default: ``min(n, max(2*k + 1, 20))`` tol : float, optional Tolerance for singular values. Zero (default) means machine precision. which : str, ['LM' | 'SM'], optional Which `k` singular values to find: - 'LM' : largest singular values - 'SM' : smallest singular values .. 
versionadded:: 0.12.0 v0 : ndarray, optional Starting vector for iteration, of length min(A.shape). Should be an (approximate) left singular vector if N > M and a right singular vector otherwise. Default: random .. versionadded:: 0.12.0 maxiter : int, optional Maximum number of iterations. .. versionadded:: 0.12.0 return_singular_vectors : bool or str, optional - True: return singular vectors (True) in addition to singular values. .. versionadded:: 0.12.0 - "u": only return the u matrix, without computing vh (if N > M). - "vh": only return the vh matrix, without computing u (if N <= M). .. versionadded:: 0.16.0 solver : str, optional Eigenvalue solver to use. Should be 'arpack' or 'lobpcg'. Default: 'arpack' Returns ------- u : ndarray, shape=(M, k) Unitary matrix having left singular vectors as columns. If `return_singular_vectors` is "vh", this variable is not computed, and None is returned instead. s : ndarray, shape=(k,) The singular values. vt : ndarray, shape=(k, N) Unitary matrix having right singular vectors as rows. If `return_singular_vectors` is "u", this variable is not computed, and None is returned instead. Notes ----- This is a naive implementation using ARPACK or LOBPCG as an eigensolver on A.H * A or A * A.H, depending on which one is more efficient. 
Examples -------- >>> from scipy.sparse import csc_matrix >>> from scipy.sparse.linalg import svds, eigs >>> A = csc_matrix([[1, 0, 0], [5, 0, 2], [0, -1, 0], [0, 0, 3]], dtype=float) >>> u, s, vt = svds(A, k=2) >>> s array([ 2.75193379, 5.6059665 ]) >>> np.sqrt(eigs(A.dot(A.T), k=2)[0]).real array([ 5.6059665 , 2.75193379]) """ if which == 'LM': largest = True elif which == 'SM': largest = False else: raise ValueError("which must be either 'LM' or 'SM'.") if not (isinstance(A, LinearOperator) or isspmatrix(A) or is_pydata_spmatrix(A)): A = np.asarray(A) n, m = A.shape if k <= 0 or k >= min(n, m): raise ValueError("k must be between 1 and min(A.shape), k=%d" % k) if isinstance(A, LinearOperator): if n > m: X_dot = A.matvec X_matmat = A.matmat XH_dot = A.rmatvec XH_mat = A.rmatmat else: X_dot = A.rmatvec X_matmat = A.rmatmat XH_dot = A.matvec XH_mat = A.matmat dtype = getattr(A, 'dtype', None) if dtype is None: dtype = A.dot(np.zeros([m, 1])).dtype else: if n > m: X_dot = X_matmat = A.dot XH_dot = XH_mat = _herm(A).dot else: XH_dot = XH_mat = A.dot X_dot = X_matmat = _herm(A).dot def matvec_XH_X(x): return XH_dot(X_dot(x)) def matmat_XH_X(x): return XH_mat(X_matmat(x)) XH_X = LinearOperator(matvec=matvec_XH_X, dtype=A.dtype, matmat=matmat_XH_X, shape=(min(A.shape), min(A.shape))) # Get a low rank approximation of the implicitly defined gramian matrix. # This is not a stable way to approach the problem. if solver == 'lobpcg': if k == 1 and v0 is not None: X = np.reshape(v0, (-1, 1)) else: X = np.random.RandomState(52).randn(min(A.shape), k) eigvals, eigvec = lobpcg(XH_X, X, tol=tol ** 2, maxiter=maxiter, largest=largest) elif solver == 'arpack' or solver is None: eigvals, eigvec = eigsh(XH_X, k=k, tol=tol ** 2, maxiter=maxiter, ncv=ncv, which=which, v0=v0) else: raise ValueError("solver must be either 'arpack', or 'lobpcg'.") # Gramian matrices have real non-negative eigenvalues. 
eigvals = np.maximum(eigvals.real, 0) # Use the sophisticated detection of small eigenvalues from pinvh. t = eigvec.dtype.char.lower() factor = {'f': 1E3, 'd': 1E6} cond = factor[t] * np.finfo(t).eps cutoff = cond * np.max(eigvals) # Get a mask indicating which eigenpairs are not degenerately tiny, # and create the re-ordered array of thresholded singular values. above_cutoff = (eigvals > cutoff) nlarge = above_cutoff.sum() nsmall = k - nlarge slarge = np.sqrt(eigvals[above_cutoff]) s = np.zeros_like(eigvals) s[:nlarge] = slarge if not return_singular_vectors: return np.sort(s) if n > m: vlarge = eigvec[:, above_cutoff] ularge = X_matmat(vlarge) / slarge if return_singular_vectors != 'vh' else None vhlarge = _herm(vlarge) else: ularge = eigvec[:, above_cutoff] vhlarge = _herm(X_matmat(ularge) / slarge) if return_singular_vectors != 'u' else None u = _augmented_orthonormal_cols(ularge, nsmall) if ularge is not None else None vh = _augmented_orthonormal_rows(vhlarge, nsmall) if vhlarge is not None else None indexes_sorted = np.argsort(s) s = s[indexes_sorted] if u is not None: u = u[:, indexes_sorted] if vh is not None: vh = vh[indexes_sorted] return u, s, vh
import numpy as np from numpy.testing import assert_allclose, assert_array_equal import pytest from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn import scipy.fft as fft from scipy import fftpack # scipy.fft wraps the fftpack versions but with normalized inverse transforms. # So, the forward transforms and definitions are already thoroughly tested in # fftpack/test_real_transforms.py @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_1d(forward, backward, type, n, axis, norm): # Test the identity f^-1(f(x)) == x x = np.random.rand(n, n) y = forward(x, type, axis=axis, norm=norm) z = backward(y, type, axis=axis, norm=norm) assert_allclose(z, x) pad = [(0, 0)] * 2 pad[axis] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, n, axis, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [True, False]) def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.rand(7, 8) x_orig = x.copy() y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x) y_orig = y.copy() z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x) if not overwrite_x: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) else: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) @pytest.mark.parametrize("forward, backward", 
[(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 4), 0), ((4, 4), 1), ((4, 4), None), ((4, 4), (0, 1)), ((10, 12), None), ((10, 12), (0, 1)), ((4, 5, 6), None), ((4, 5, 6), 1), ((4, 5, 6), (0, 2)), ]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_nd(forward, backward, type, shape, axes, norm): # Test the identity f^-1(f(x)) == x x = np.random.random(shape) if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) z = backward(y, type, axes=axes, norm=norm) assert_allclose(z, x) if axes is None: pad = [(0, 4)] * x.ndim elif isinstance(axes, int): pad = [(0, 0)] * x.ndim pad[axes] = (0, 4) else: pad = [(0, 0)] * x.ndim for a in axes: pad[a] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, shape, axes, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 5), 0), ((4, 5), 1), ((4, 5), None), ]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [False, True]) def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.random(shape).astype(dtype) x_orig = x.copy() if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) y_orig = y.copy() z = backward(y, type, axes=axes, norm=norm) if overwrite_x: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) else: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) @pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn']) @pytest.mark.parametrize("type", [1, 2, 3, 4]) 
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_fftpack_equivalience(func, type, norm): x = np.random.rand(8, 16) fft_res = getattr(fft, func)(x, type, norm=norm) fftpack_res = getattr(fftpack, func)(x, type, norm=norm) assert_allclose(fft_res, fftpack_res)
e-q/scipy
scipy/fft/tests/test_real_transforms.py
scipy/sparse/linalg/eigen/arpack/arpack.py
""" Window functions (:mod:`scipy.signal.windows`) ============================================== The suite of window functions for filtering and spectral estimation. .. currentmodule:: scipy.signal.windows .. autosummary:: :toctree: generated/ get_window -- Return a window of a given length and type. barthann -- Bartlett-Hann window bartlett -- Bartlett window blackman -- Blackman window blackmanharris -- Minimum 4-term Blackman-Harris window bohman -- Bohman window boxcar -- Boxcar window chebwin -- Dolph-Chebyshev window cosine -- Cosine window dpss -- Discrete prolate spheroidal sequences exponential -- Exponential window flattop -- Flat top window gaussian -- Gaussian window general_cosine -- Generalized Cosine window general_gaussian -- Generalized Gaussian window general_hamming -- Generalized Hamming window hamming -- Hamming window hann -- Hann window hanning -- Hann window kaiser -- Kaiser window nuttall -- Nuttall's minimum 4-term Blackman-Harris window parzen -- Parzen window triang -- Triangular window tukey -- Tukey window """ from .windows import * __all__ = ['boxcar', 'triang', 'parzen', 'bohman', 'blackman', 'nuttall', 'blackmanharris', 'flattop', 'bartlett', 'hanning', 'barthann', 'hamming', 'kaiser', 'gaussian', 'general_gaussian', 'general_cosine', 'general_hamming', 'chebwin', 'cosine', 'hann', 'exponential', 'tukey', 'get_window', 'dpss']
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest

from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn
import scipy.fft as fft
from scipy import fftpack

# scipy.fft wraps the fftpack versions but with normalized inverse transforms.
# So, the forward transforms and definitions are already thoroughly tested in
# fftpack/test_real_transforms.py


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_1d(forward, backward, type, n, axis, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.rand(n, n)

    y = forward(x, type, axis=axis, norm=norm)
    z = backward(y, type, axis=axis, norm=norm)
    assert_allclose(z, x)

    # Samples appended past the requested output length ``n`` must be
    # ignored by the inverse transform (it truncates to n first).
    pad = [(0, 0)] * 2
    pad[axis] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, n, axis, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [True, False])
def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm,
                               overwrite_x):
    # Test the identity f^-1(f(x)) == x
    # BUG FIX: ``dtype`` was parametrized but never applied, so every case
    # silently tested float64 input.  Cast so each dtype path is exercised.
    x = np.random.rand(7, 8).astype(dtype)
    x_orig = x.copy()

    y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    if not overwrite_x:
        # Without permission to overwrite, both inputs must be left intact.
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)
    else:
        # Even if intermediate buffers were clobbered, the round trip must
        # still reproduce the original data.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 4), 0),
                             ((4, 4), 1),
                             ((4, 4), None),
                             ((4, 4), (0, 1)),
                             ((10, 12), None),
                             ((10, 12), (0, 1)),
                             ((4, 5, 6), None),
                             ((4, 5, 6), 1),
                             ((4, 5, 6), (0, 2)),
                         ])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_nd(forward, backward, type, shape, axes, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape)

    if axes is not None:
        shape = np.take(shape, axes)

    y = forward(x, type, axes=axes, norm=norm)
    z = backward(y, type, axes=axes, norm=norm)
    assert_allclose(z, x)

    # Build a pad spec appending 4 samples along every transformed axis;
    # the inverse with explicit output shape must discard the padding.
    if axes is None:
        pad = [(0, 4)] * x.ndim
    elif isinstance(axes, int):
        pad = [(0, 0)] * x.ndim
        pad[axes] = (0, 4)
    else:
        pad = [(0, 0)] * x.ndim
        for a in axes:
            pad[a] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, shape, axes, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 5), 0),
                             ((4, 5), 1),
                             ((4, 5), None),
                         ])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [False, True])
def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype,
                               norm, overwrite_x):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape).astype(dtype)
    x_orig = x.copy()

    if axes is not None:
        shape = np.take(shape, axes)

    # BUG FIX: ``overwrite_x`` was parametrized but never forwarded to the
    # transforms, so the True cases were identical to the False cases.
    y = forward(x, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    if overwrite_x:
        # The round trip must recover the original data regardless of
        # which intermediate buffers were overwritten.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
    else:
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)


@pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn'])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_fftpack_equivalence(func, type, norm):
    # scipy.fft and scipy.fftpack must agree on the forward transforms.
    # (Renamed from the misspelled ``test_fftpack_equivalience``.)
    x = np.random.rand(8, 16)
    fft_res = getattr(fft, func)(x, type, norm=norm)
    fftpack_res = getattr(fftpack, func)(x, type, norm=norm)

    assert_allclose(fft_res, fftpack_res)
e-q/scipy
scipy/fft/tests/test_real_transforms.py
scipy/signal/windows/__init__.py
""" Spectral Algorithm for Nonlinear Equations """ import collections import numpy as np from scipy.optimize import OptimizeResult from scipy.optimize.optimize import _check_unknown_options from .linesearch import _nonmonotone_line_search_cruz, _nonmonotone_line_search_cheng class _NoConvergence(Exception): pass def _root_df_sane(func, x0, args=(), ftol=1e-8, fatol=1e-300, maxfev=1000, fnorm=None, callback=None, disp=False, M=10, eta_strategy=None, sigma_eps=1e-10, sigma_0=1.0, line_search='cruz', **unknown_options): r""" Solve nonlinear equation with the DF-SANE method Options ------- ftol : float, optional Relative norm tolerance. fatol : float, optional Absolute norm tolerance. Algorithm terminates when ``||func(x)|| < fatol + ftol ||func(x_0)||``. fnorm : callable, optional Norm to use in the convergence check. If None, 2-norm is used. maxfev : int, optional Maximum number of function evaluations. disp : bool, optional Whether to print convergence process to stdout. eta_strategy : callable, optional Choice of the ``eta_k`` parameter, which gives slack for growth of ``||F||**2``. Called as ``eta_k = eta_strategy(k, x, F)`` with `k` the iteration number, `x` the current iterate and `F` the current residual. Should satisfy ``eta_k > 0`` and ``sum(eta, k=0..inf) < inf``. Default: ``||F||**2 / (1 + k)**2``. sigma_eps : float, optional The spectral coefficient is constrained to ``sigma_eps < sigma < 1/sigma_eps``. Default: 1e-10 sigma_0 : float, optional Initial spectral coefficient. Default: 1.0 M : int, optional Number of iterates to include in the nonmonotonic line search. Default: 10 line_search : {'cruz', 'cheng'} Type of line search to employ. 'cruz' is the original one defined in [Martinez & Raydan. Math. Comp. 75, 1429 (2006)], 'cheng' is a modified search defined in [Cheng & Li. IMA J. Numer. Anal. 29, 814 (2009)]. Default: 'cruz' References ---------- .. 
[1] "Spectral residual method without gradient information for solving large-scale nonlinear systems of equations." W. La Cruz, J.M. Martinez, M. Raydan. Math. Comp. **75**, 1429 (2006). .. [2] W. La Cruz, Opt. Meth. Software, 29, 24 (2014). .. [3] W. Cheng, D.-H. Li. IMA J. Numer. Anal. **29**, 814 (2009). """ _check_unknown_options(unknown_options) if line_search not in ('cheng', 'cruz'): raise ValueError("Invalid value %r for 'line_search'" % (line_search,)) nexp = 2 if eta_strategy is None: # Different choice from [1], as their eta is not invariant # vs. scaling of F. def eta_strategy(k, x, F): # Obtain squared 2-norm of the initial residual from the outer scope return f_0 / (1 + k)**2 if fnorm is None: def fnorm(F): # Obtain squared 2-norm of the current residual from the outer scope return f_k**(1.0/nexp) def fmerit(F): return np.linalg.norm(F)**nexp nfev = [0] f, x_k, x_shape, f_k, F_k, is_complex = _wrap_func(func, x0, fmerit, nfev, maxfev, args) k = 0 f_0 = f_k sigma_k = sigma_0 F_0_norm = fnorm(F_k) # For the 'cruz' line search prev_fs = collections.deque([f_k], M) # For the 'cheng' line search Q = 1.0 C = f_0 converged = False message = "too many function evaluations required" while True: F_k_norm = fnorm(F_k) if disp: print("iter %d: ||F|| = %g, sigma = %g" % (k, F_k_norm, sigma_k)) if callback is not None: callback(x_k, F_k) if F_k_norm < ftol * F_0_norm + fatol: # Converged! 
message = "successful convergence" converged = True break # Control spectral parameter, from [2] if abs(sigma_k) > 1/sigma_eps: sigma_k = 1/sigma_eps * np.sign(sigma_k) elif abs(sigma_k) < sigma_eps: sigma_k = sigma_eps # Line search direction d = -sigma_k * F_k # Nonmonotone line search eta = eta_strategy(k, x_k, F_k) try: if line_search == 'cruz': alpha, xp, fp, Fp = _nonmonotone_line_search_cruz(f, x_k, d, prev_fs, eta=eta) elif line_search == 'cheng': alpha, xp, fp, Fp, C, Q = _nonmonotone_line_search_cheng(f, x_k, d, f_k, C, Q, eta=eta) except _NoConvergence: break # Update spectral parameter s_k = xp - x_k y_k = Fp - F_k sigma_k = np.vdot(s_k, s_k) / np.vdot(s_k, y_k) # Take step x_k = xp F_k = Fp f_k = fp # Store function value if line_search == 'cruz': prev_fs.append(fp) k += 1 x = _wrap_result(x_k, is_complex, shape=x_shape) F = _wrap_result(F_k, is_complex) result = OptimizeResult(x=x, success=converged, message=message, fun=F, nfev=nfev[0], nit=k) return result def _wrap_func(func, x0, fmerit, nfev_list, maxfev, args=()): """ Wrap a function and an initial value so that (i) complex values are wrapped to reals, and (ii) value for a merit function fmerit(x, f) is computed at the same time, (iii) iteration count is maintained and an exception is raised if it is exceeded. Parameters ---------- func : callable Function to wrap x0 : ndarray Initial value fmerit : callable Merit function fmerit(f) for computing merit value from residual. nfev_list : list List to store number of evaluations in. Should be [0] in the beginning. maxfev : int Maximum number of evaluations before _NoConvergence is raised. args : tuple Extra arguments to func Returns ------- wrap_func : callable Wrapped function, to be called as ``F, fp = wrap_func(x0)`` x0_wrap : ndarray of float Wrapped initial value; raveled to 1-D and complex values mapped to reals. 
x0_shape : tuple Shape of the initial value array f : float Merit function at F F : ndarray of float Residual at x0_wrap is_complex : bool Whether complex values were mapped to reals """ x0 = np.asarray(x0) x0_shape = x0.shape F = np.asarray(func(x0, *args)).ravel() is_complex = np.iscomplexobj(x0) or np.iscomplexobj(F) x0 = x0.ravel() nfev_list[0] = 1 if is_complex: def wrap_func(x): if nfev_list[0] >= maxfev: raise _NoConvergence() nfev_list[0] += 1 z = _real2complex(x).reshape(x0_shape) v = np.asarray(func(z, *args)).ravel() F = _complex2real(v) f = fmerit(F) return f, F x0 = _complex2real(x0) F = _complex2real(F) else: def wrap_func(x): if nfev_list[0] >= maxfev: raise _NoConvergence() nfev_list[0] += 1 x = x.reshape(x0_shape) F = np.asarray(func(x, *args)).ravel() f = fmerit(F) return f, F return wrap_func, x0, x0_shape, fmerit(F), F, is_complex def _wrap_result(result, is_complex, shape=None): """ Convert from real to complex and reshape result arrays. """ if is_complex: z = _real2complex(result) else: z = result if shape is not None: z = z.reshape(shape) return z def _real2complex(x): return np.ascontiguousarray(x, dtype=float).view(np.complex128) def _complex2real(z): return np.ascontiguousarray(z, dtype=complex).view(np.float64)
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest

from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn
import scipy.fft as fft
from scipy import fftpack

# scipy.fft wraps the fftpack versions but with normalized inverse transforms.
# So, the forward transforms and definitions are already thoroughly tested in
# fftpack/test_real_transforms.py


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_1d(forward, backward, type, n, axis, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.rand(n, n)

    y = forward(x, type, axis=axis, norm=norm)
    z = backward(y, type, axis=axis, norm=norm)
    assert_allclose(z, x)

    # Samples appended past the requested output length ``n`` must be
    # ignored by the inverse transform (it truncates to n first).
    pad = [(0, 0)] * 2
    pad[axis] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, n, axis, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [True, False])
def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm,
                               overwrite_x):
    # Test the identity f^-1(f(x)) == x
    # BUG FIX: ``dtype`` was parametrized but never applied, so every case
    # silently tested float64 input.  Cast so each dtype path is exercised.
    x = np.random.rand(7, 8).astype(dtype)
    x_orig = x.copy()

    y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    if not overwrite_x:
        # Without permission to overwrite, both inputs must be left intact.
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)
    else:
        # Even if intermediate buffers were clobbered, the round trip must
        # still reproduce the original data.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 4), 0),
                             ((4, 4), 1),
                             ((4, 4), None),
                             ((4, 4), (0, 1)),
                             ((10, 12), None),
                             ((10, 12), (0, 1)),
                             ((4, 5, 6), None),
                             ((4, 5, 6), 1),
                             ((4, 5, 6), (0, 2)),
                         ])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_nd(forward, backward, type, shape, axes, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape)

    if axes is not None:
        shape = np.take(shape, axes)

    y = forward(x, type, axes=axes, norm=norm)
    z = backward(y, type, axes=axes, norm=norm)
    assert_allclose(z, x)

    # Build a pad spec appending 4 samples along every transformed axis;
    # the inverse with explicit output shape must discard the padding.
    if axes is None:
        pad = [(0, 4)] * x.ndim
    elif isinstance(axes, int):
        pad = [(0, 0)] * x.ndim
        pad[axes] = (0, 4)
    else:
        pad = [(0, 0)] * x.ndim
        for a in axes:
            pad[a] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, shape, axes, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 5), 0),
                             ((4, 5), 1),
                             ((4, 5), None),
                         ])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [False, True])
def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype,
                               norm, overwrite_x):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape).astype(dtype)
    x_orig = x.copy()

    if axes is not None:
        shape = np.take(shape, axes)

    # BUG FIX: ``overwrite_x`` was parametrized but never forwarded to the
    # transforms, so the True cases were identical to the False cases.
    y = forward(x, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    if overwrite_x:
        # The round trip must recover the original data regardless of
        # which intermediate buffers were overwritten.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
    else:
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)


@pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn'])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_fftpack_equivalence(func, type, norm):
    # scipy.fft and scipy.fftpack must agree on the forward transforms.
    # (Renamed from the misspelled ``test_fftpack_equivalience``.)
    x = np.random.rand(8, 16)
    fft_res = getattr(fft, func)(x, type, norm=norm)
    fftpack_res = getattr(fftpack, func)(x, type, norm=norm)

    assert_allclose(fft_res, fftpack_res)
e-q/scipy
scipy/fft/tests/test_real_transforms.py
scipy/optimize/_spectral.py
#
# Created by: Pearu Peterson, March 2002
#
""" Test functions for scipy.linalg.matfuncs module

"""
import math

import numpy as np
from numpy import array, eye, exp, random
from numpy.linalg import matrix_power
from numpy.testing import (
        assert_allclose, assert_, assert_array_almost_equal, assert_equal,
        assert_array_almost_equal_nulp, suppress_warnings)

from scipy.sparse import csc_matrix, SparseEfficiencyWarning
from scipy.sparse.construct import eye as speye
from scipy.sparse.linalg.matfuncs import (expm, _expm,
        ProductOperator, MatrixPowerOperator,
        _onenorm_matrix_power_nnm)
from scipy.sparse.sputils import matrix
from scipy.linalg import logm
from scipy.special import factorial, binom
import scipy.sparse
import scipy.sparse.linalg


def _burkardt_13_power(n, p):
    """
    A helper function for testing matrix functions.

    Parameters
    ----------
    n : integer greater than 1
        Order of the square matrix to be returned.
    p : non-negative integer
        Power of the matrix.

    Returns
    -------
    out : ndarray representing a square matrix
        A Forsythe matrix of order n, raised to the power p.

    """
    # Input validation.
    if n != int(n) or n < 2:
        raise ValueError('n must be an integer greater than 1')
    n = int(n)
    if p != int(p) or p < 0:
        raise ValueError('p must be a non-negative integer')
    p = int(p)

    # Construct the matrix explicitly: a superdiagonal of ``large`` entries
    # offset by b, plus a subdiagonal of ``small`` entries wrapping around.
    a, b = divmod(p, n)
    large = np.power(10.0, -n*a)
    small = large * np.power(10.0, -n)
    return np.diag([large]*(n-b), b) + np.diag([small]*b, b-n)


def test_onenorm_matrix_power_nnm():
    # _onenorm_matrix_power_nnm(M, p) must equal ||M^p||_1 for
    # entrywise non-negative matrices M.
    np.random.seed(1234)
    for n in range(1, 5):
        for p in range(5):
            M = np.random.random((n, n))
            Mp = np.linalg.matrix_power(M, p)
            observed = _onenorm_matrix_power_nnm(M, p)
            expected = np.linalg.norm(Mp, 1)
            assert_allclose(observed, expected)


class TestExpM(object):
    """Tests for scipy.sparse.linalg.expm against reference values."""

    def test_zero_ndarray(self):
        a = array([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a),[[1,0],[0,1]])

    def test_zero_sparse(self):
        a = csc_matrix([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a).toarray(),[[1,0],[0,1]])

    def test_zero_matrix(self):
        a = matrix([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a),[[1,0],[0,1]])

    def test_misc_types(self):
        # expm should accept nested tuples, lists, np.matrix,
        # ndarrays and sparse matrices, real or complex.
        A = expm(np.array([[1]]))
        assert_allclose(expm(((1,),)), A)
        assert_allclose(expm([[1]]), A)
        assert_allclose(expm(matrix([[1]])), A)
        assert_allclose(expm(np.array([[1]])), A)
        assert_allclose(expm(csc_matrix([[1]])).A, A)
        B = expm(np.array([[1j]]))
        assert_allclose(expm(((1j,),)), B)
        assert_allclose(expm([[1j]]), B)
        assert_allclose(expm(matrix([[1j]])), B)
        assert_allclose(expm(csc_matrix([[1j]])).A, B)

    def test_bidiagonal_sparse(self):
        A = csc_matrix([
            [1, 3, 0],
            [0, 1, 5],
            [0, 0, 2]], dtype=float)
        e1 = math.exp(1)
        e2 = math.exp(2)
        expected = np.array([
            [e1, 3*e1, 15*(e2 - 2*e1)],
            [0, e1, 5*(e2 - e1)],
            [0, 0, e2]], dtype=float)
        observed = expm(A).toarray()
        assert_array_almost_equal(observed, expected)

    def test_padecases_dtype_float(self):
        for dtype in [np.float32, np.float64]:
            for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
                # exp of a scaled identity is exp(scale) times the identity.
                A = scale * eye(3, dtype=dtype)
                observed = expm(A)
                expected = exp(scale) * eye(3, dtype=dtype)
                assert_array_almost_equal_nulp(observed, expected, nulp=100)

    def test_padecases_dtype_complex(self):
        for dtype in [np.complex64, np.complex128]:
            for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
                A = scale * eye(3, dtype=dtype)
                observed = expm(A)
                expected = exp(scale) * eye(3, dtype=dtype)
                assert_array_almost_equal_nulp(observed, expected, nulp=100)

    def test_padecases_dtype_sparse_float(self):
        # float32 and complex64 lead to errors in spsolve/UMFpack
        dtype = np.float64
        for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
            a = scale * speye(3, 3, dtype=dtype, format='csc')
            e = exp(scale) * eye(3, dtype=dtype)
            with suppress_warnings() as sup:
                sup.filter(SparseEfficiencyWarning,
                           "Changing the sparsity structure of a csc_matrix is expensive.")
                # Exercise both one-norm estimation paths of _expm.
                exact_onenorm = _expm(a, use_exact_onenorm=True).toarray()
                inexact_onenorm = _expm(a, use_exact_onenorm=False).toarray()
            assert_array_almost_equal_nulp(exact_onenorm, e, nulp=100)
            assert_array_almost_equal_nulp(inexact_onenorm, e, nulp=100)

    def test_padecases_dtype_sparse_complex(self):
        # float32 and complex64 lead to errors in spsolve/UMFpack
        dtype = np.complex128
        for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
            a = scale * speye(3, 3, dtype=dtype, format='csc')
            e = exp(scale) * eye(3, dtype=dtype)
            with suppress_warnings() as sup:
                sup.filter(SparseEfficiencyWarning,
                           "Changing the sparsity structure of a csc_matrix is expensive.")
                assert_array_almost_equal_nulp(expm(a).toarray(), e, nulp=100)

    def test_logm_consistency(self):
        # expm(logm(A)) should round-trip back to A.
        random.seed(1234)
        for dtype in [np.float64, np.complex128]:
            for n in range(1, 10):
                for scale in [1e-4, 1e-3, 1e-2, 1e-1, 1, 1e1, 1e2]:
                    # make logm(A) be of a given scale
                    A = (eye(n) + random.rand(n, n) * scale).astype(dtype)
                    if np.iscomplexobj(A):
                        A = A + 1j * random.rand(n, n) * scale
                    assert_array_almost_equal(expm(logm(A)), A)

    def test_integer_matrix(self):
        Q = np.array([
            [-3, 1, 1, 1],
            [1, -3, 1, 1],
            [1, 1, -3, 1],
            [1, 1, 1, -3]])
        assert_allclose(expm(Q), expm(1.0 * Q))

    def test_integer_matrix_2(self):
        # Check for integer overflows
        Q = np.array([[-500, 500, 0, 0],
                      [0, -550, 360, 190],
                      [0, 630, -630, 0],
                      [0, 0, 0, 0]], dtype=np.int16)
        assert_allclose(expm(Q), expm(1.0 * Q))

        Q = csc_matrix(Q)
        assert_allclose(expm(Q).A, expm(1.0 * Q).A)

    def test_triangularity_perturbation(self):
        # Experiment (1) of
        # Awad H. Al-Mohy and Nicholas J. Higham (2012)
        # Improved Inverse Scaling and Squaring Algorithms
        # for the Matrix Logarithm.
        A = np.array([
            [3.2346e-1, 3e4, 3e4, 3e4],
            [0, 3.0089e-1, 3e4, 3e4],
            [0, 0, 3.221e-1, 3e4],
            [0, 0, 0, 3.0744e-1]], dtype=float)
        A_logm = np.array([
            [-1.12867982029050462e+00, 9.61418377142025565e+04,
             -4.52485573953179264e+09, 2.92496941103871812e+14],
            [0.00000000000000000e+00, -1.20101052953082288e+00,
             9.63469687211303099e+04, -4.68104828911105442e+09],
            [0.00000000000000000e+00, 0.00000000000000000e+00,
             -1.13289322264498393e+00, 9.53249183094775653e+04],
            [0.00000000000000000e+00, 0.00000000000000000e+00,
             0.00000000000000000e+00, -1.17947533272554850e+00]],
            dtype=float)
        assert_allclose(expm(A_logm), A, rtol=1e-4)

        # Perturb the upper triangular matrix by tiny amounts,
        # so that it becomes technically not upper triangular.
        random.seed(1234)
        tiny = 1e-17
        A_logm_perturbed = A_logm.copy()
        A_logm_perturbed[1, 0] = tiny
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "Ill-conditioned.*")
            A_expm_logm_perturbed = expm(A_logm_perturbed)
        rtol = 1e-4
        atol = 100 * tiny
        # The perturbed result is expected to be far from A, since the
        # problem is badly conditioned off the triangular fast path.
        assert_(not np.allclose(A_expm_logm_perturbed, A, rtol=rtol, atol=atol))

    def test_burkardt_1(self):
        # This matrix is diagonal.
        # The calculation of the matrix exponential is simple.
        #
        # This is the first of a series of matrix exponential tests
        # collected by John Burkardt from the following sources.
        #
        # Alan Laub,
        # Review of "Linear System Theory" by Joao Hespanha,
        # SIAM Review,
        # Volume 52, Number 4, December 2010, pages 779--781.
        #
        # Cleve Moler and Charles Van Loan,
        # Nineteen Dubious Ways to Compute the Exponential of a Matrix,
        # Twenty-Five Years Later,
        # SIAM Review,
        # Volume 45, Number 1, March 2003, pages 3--49.
        #
        # Cleve Moler,
        # Cleve's Corner: A Balancing Act for the Matrix Exponential,
        # 23 July 2012.
        #
        # Robert Ward,
        # Numerical computation of the matrix exponential
        # with accuracy estimate,
        # SIAM Journal on Numerical Analysis,
        # Volume 14, Number 4, September 1977, pages 600--610.
        exp1 = np.exp(1)
        exp2 = np.exp(2)
        A = np.array([
            [1, 0],
            [0, 2],
            ], dtype=float)
        desired = np.array([
            [exp1, 0],
            [0, exp2],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_2(self):
        # This matrix is symmetric.
        # The calculation of the matrix exponential is straightforward.
        A = np.array([
            [1, 3],
            [3, 2],
            ], dtype=float)
        desired = np.array([
            [39.322809708033859, 46.166301438885753],
            [46.166301438885768, 54.711576854329110],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_3(self):
        # This example is due to Laub.
        # This matrix is ill-suited for the Taylor series approach.
        # As powers of A are computed, the entries blow up too quickly.
        exp1 = np.exp(1)
        exp39 = np.exp(39)
        A = np.array([
            [0, 1],
            [-39, -40],
            ], dtype=float)
        desired = np.array([
            [
                39/(38*exp1) - 1/(38*exp39),
                -np.expm1(-38) / (38*exp1)],
            [
                39*np.expm1(-38) / (38*exp1),
                -1/(38*exp1) + 39/(38*exp39)],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_4(self):
        # This example is due to Moler and Van Loan.
        # The example will cause problems for the series summation approach,
        # as well as for diagonal Pade approximations.
        A = np.array([
            [-49, 24],
            [-64, 31],
            ], dtype=float)
        U = np.array([[3, 1], [4, 2]], dtype=float)
        V = np.array([[1, -1/2], [-2, 3/2]], dtype=float)
        w = np.array([-17, -1], dtype=float)
        # Reference value built from the eigendecomposition A = U diag(w) V.
        desired = np.dot(U * np.exp(w), V)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_5(self):
        # This example is due to Moler and Van Loan.
        # This matrix is strictly upper triangular
        # All powers of A are zero beyond some (low) limit.
        # This example will cause problems for Pade approximations.
        A = np.array([
            [0, 6, 0, 0],
            [0, 0, 6, 0],
            [0, 0, 0, 6],
            [0, 0, 0, 0],
            ], dtype=float)
        desired = np.array([
            [1, 6, 18, 36],
            [0, 1, 6, 18],
            [0, 0, 1, 6],
            [0, 0, 0, 1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_6(self):
        # This example is due to Moler and Van Loan.
        # This matrix does not have a complete set of eigenvectors.
        # That means the eigenvector approach will fail.
        exp1 = np.exp(1)
        A = np.array([
            [1, 1],
            [0, 1],
            ], dtype=float)
        desired = np.array([
            [exp1, exp1],
            [0, exp1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_7(self):
        # This example is due to Moler and Van Loan.
        # This matrix is very close to example 5.
        # Mathematically, it has a complete set of eigenvectors.
        # Numerically, however, the calculation will be suspect.
        exp1 = np.exp(1)
        eps = np.spacing(1)
        A = np.array([
            [1 + eps, 1],
            [0, 1 - eps],
            ], dtype=float)
        desired = np.array([
            [exp1, exp1],
            [0, exp1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_8(self):
        # This matrix was an example in Wikipedia.
        exp4 = np.exp(4)
        exp16 = np.exp(16)
        A = np.array([
            [21, 17, 6],
            [-5, -1, -6],
            [4, 4, 16],
            ], dtype=float)
        desired = np.array([
            [13*exp16 - exp4, 13*exp16 - 5*exp4, 2*exp16 - 2*exp4],
            [-9*exp16 + exp4, -9*exp16 + 5*exp4, -2*exp16 + 2*exp4],
            [16*exp16, 16*exp16, 4*exp16],
            ], dtype=float) * 0.25
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_9(self):
        # This matrix is due to the NAG Library.
        # It is an example for function F01ECF.
        A = np.array([
            [1, 2, 2, 2],
            [3, 1, 1, 2],
            [3, 2, 1, 2],
            [3, 3, 3, 1],
            ], dtype=float)
        desired = np.array([
            [740.7038, 610.8500, 542.2743, 549.1753],
            [731.2510, 603.5524, 535.0884, 542.2743],
            [823.7630, 679.4257, 603.5524, 610.8500],
            [998.4355, 823.7630, 731.2510, 740.7038],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_10(self):
        # This is Ward's example #1.
        # It is defective and nonderogatory.
        A = np.array([
            [4, 2, 0],
            [1, 4, 1],
            [1, 1, 4],
            ], dtype=float)
        assert_allclose(sorted(scipy.linalg.eigvals(A)), (3, 3, 6))
        desired = np.array([
            [147.8666224463699, 183.7651386463682, 71.79703239999647],
            [127.7810855231823, 183.7651386463682, 91.88256932318415],
            [127.7810855231824, 163.6796017231806, 111.9681062463718],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_11(self):
        # This is Ward's example #2.
        # It is a symmetric matrix.
        A = np.array([
            [29.87942128909879, 0.7815750847907159, -2.289519314033932],
            [0.7815750847907159, 25.72656945571064, 8.680737820540137],
            [-2.289519314033932, 8.680737820540137, 34.39400925519054],
            ], dtype=float)
        assert_allclose(scipy.linalg.eigvalsh(A), (20, 30, 40))
        desired = np.array([
            [
                5.496313853692378E+15,
                -1.823188097200898E+16,
                -3.047577080858001E+16],
            [
                -1.823188097200899E+16,
                6.060522870222108E+16,
                1.012918429302482E+17],
            [
                -3.047577080858001E+16,
                1.012918429302482E+17,
                1.692944112408493E+17],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_12(self):
        # This is Ward's example #3.
        # Ward's algorithm has difficulty estimating the accuracy
        # of its results.
        A = np.array([
            [-131, 19, 18],
            [-390, 56, 54],
            [-387, 57, 52],
            ], dtype=float)
        assert_allclose(sorted(scipy.linalg.eigvals(A)), (-20, -2, -1))
        desired = np.array([
            [-1.509644158793135, 0.3678794391096522, 0.1353352811751005],
            [-5.632570799891469, 1.471517758499875, 0.4060058435250609],
            [-4.934938326088363, 1.103638317328798, 0.5413411267617766],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_13(self):
        # This is Ward's example #4.
        # This is a version of the Forsythe matrix.
        # The eigenvector problem is badly conditioned.
        # Ward's algorithm has difficulty estimating the accuracy
        # of its results for this problem.
        #
        # Check the construction of one instance of this family of matrices.
        A4_actual = _burkardt_13_power(4, 1)
        A4_desired = [[0, 1, 0, 0],
                      [0, 0, 1, 0],
                      [0, 0, 0, 1],
                      [1e-4, 0, 0, 0]]
        assert_allclose(A4_actual, A4_desired)
        # Check the expm for a few instances.
        for n in (2, 3, 4, 10):
            # Approximate expm using Taylor series.
            # This works well for this matrix family
            # because each matrix in the summation,
            # even before dividing by the factorial,
            # is entrywise positive with max entry 10**(-floor(p/n)*n).
            k = max(1, int(np.ceil(16/n)))
            desired = np.zeros((n, n), dtype=float)
            for p in range(n*k):
                Ap = _burkardt_13_power(n, p)
                assert_equal(np.min(Ap), 0)
                assert_allclose(np.max(Ap), np.power(10, -np.floor(p/n)*n))
                desired += Ap / factorial(p)
            actual = expm(_burkardt_13_power(n, 1))
            assert_allclose(actual, desired)

    def test_burkardt_14(self):
        # This is Moler's example.
        # This badly scaled matrix caused problems for MATLAB's expm().
        A = np.array([
            [0, 1e-8, 0],
            [-(2e10 + 4e8/6.), -3, 2e10],
            [200./3., 0, -200./3.],
            ], dtype=float)
        desired = np.array([
            [0.446849468283175, 1.54044157383952e-09, 0.462811453558774],
            [-5743067.77947947, -0.0152830038686819, -4526542.71278401],
            [0.447722977849494, 1.54270484519591e-09, 0.463480648837651],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_pascal(self):
        # Test pascal triangle.
        # Nilpotent exponential, used to trigger a failure (gh-8029)

        for scale in [1.0, 1e-3, 1e-6]:
            for n in range(0, 80, 3):
                sc = scale ** np.arange(n, -1, -1)
                if np.any(sc < 1e-300):
                    # Stop before the scaling underflows to zero.
                    break

                A = np.diag(np.arange(1, n + 1), -1) * scale
                B = expm(A)

                got = B
                expected = binom(np.arange(n + 1)[:,None],
                                 np.arange(n + 1)[None,:]) * sc[None,:] / sc[:,None]
                atol = 1e-13 * abs(expected).max()
                assert_allclose(got, expected, atol=atol)

    def test_matrix_input(self):
        # Large np.matrix inputs should work, gh-5546
        A = np.zeros((200, 200))
        A[-1,0] = 1
        B0 = expm(A)
        with suppress_warnings() as sup:
            sup.filter(DeprecationWarning, "the matrix subclass.*")
            sup.filter(PendingDeprecationWarning, "the matrix subclass.*")
            B = expm(np.matrix(A))
        assert_allclose(B, B0)

    def test_exp_sinch_overflow(self):
        # Check overflow in intermediate steps is fixed (gh-11839)
        L = np.array([[1.0, -0.5, -0.5, 0.0, 0.0, 0.0, 0.0],
                      [0.0, 1.0, 0.0, -0.5, -0.5, 0.0, 0.0],
                      [0.0, 0.0, 1.0, 0.0, 0.0, -0.5, -0.5],
                      [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                      [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                      [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                      [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]])

        E0 = expm(-L)
        E1 = expm(-2**11 * L)
        # expm(-2^11 L) must equal expm(-L) squared 11 times.
        E2 = E0
        for j in range(11):
            E2 = E2 @ E2

        assert_allclose(E1, E2)


class TestOperators(object):
    """Tests for the lazy linear operators used internally by expm."""

    def test_product_operator(self):
        random.seed(1234)
        n = 5
        k = 2
        nsamples = 10
        for i in range(nsamples):
            A = np.random.randn(n, n)
            B = np.random.randn(n, n)
            C = np.random.randn(n, n)
            D = np.random.randn(n, k)
            op = ProductOperator(A, B, C)
            assert_allclose(op.matmat(D), A.dot(B).dot(C).dot(D))
            assert_allclose(op.T.matmat(D), (A.dot(B).dot(C)).T.dot(D))

    def test_matrix_power_operator(self):
        random.seed(1234)
        n = 5
        k = 2
        p = 3
        nsamples = 10
        for i in range(nsamples):
            A = np.random.randn(n, n)
            B = np.random.randn(n, k)
            op = MatrixPowerOperator(A, p)
            assert_allclose(op.matmat(B), matrix_power(A, p).dot(B))
            assert_allclose(op.T.matmat(B), matrix_power(A, p).T.dot(B))
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest

from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn
import scipy.fft as fft
from scipy import fftpack

# scipy.fft wraps the fftpack versions but with normalized inverse transforms.
# So, the forward transforms and definitions are already thoroughly tested in
# fftpack/test_real_transforms.py


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_1d(forward, backward, type, n, axis, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.rand(n, n)

    y = forward(x, type, axis=axis, norm=norm)
    z = backward(y, type, axis=axis, norm=norm)
    assert_allclose(z, x)

    # Samples appended past the requested output length ``n`` must be
    # ignored by the inverse transform (it truncates to n first).
    pad = [(0, 0)] * 2
    pad[axis] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, n, axis, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [True, False])
def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm,
                               overwrite_x):
    # Test the identity f^-1(f(x)) == x
    # BUG FIX: ``dtype`` was parametrized but never applied, so every case
    # silently tested float64 input.  Cast so each dtype path is exercised.
    x = np.random.rand(7, 8).astype(dtype)
    x_orig = x.copy()

    y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
    if not overwrite_x:
        # Without permission to overwrite, both inputs must be left intact.
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)
    else:
        # Even if intermediate buffers were clobbered, the round trip must
        # still reproduce the original data.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 4), 0),
                             ((4, 4), 1),
                             ((4, 4), None),
                             ((4, 4), (0, 1)),
                             ((10, 12), None),
                             ((10, 12), (0, 1)),
                             ((4, 5, 6), None),
                             ((4, 5, 6), 1),
                             ((4, 5, 6), (0, 2)),
                         ])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_identity_nd(forward, backward, type, shape, axes, norm):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape)

    if axes is not None:
        shape = np.take(shape, axes)

    y = forward(x, type, axes=axes, norm=norm)
    z = backward(y, type, axes=axes, norm=norm)
    assert_allclose(z, x)

    # Build a pad spec appending 4 samples along every transformed axis;
    # the inverse with explicit output shape must discard the padding.
    if axes is None:
        pad = [(0, 4)] * x.ndim
    elif isinstance(axes, int):
        pad = [(0, 0)] * x.ndim
        pad[axes] = (0, 4)
    else:
        pad = [(0, 0)] * x.ndim
        for a in axes:
            pad[a] = (0, 4)

    y2 = np.pad(y, pad, mode='edge')
    z2 = backward(y2, type, shape, axes, norm)
    assert_allclose(z2, x)


@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 5), 0),
                             ((4, 5), 1),
                             ((4, 5), None),
                         ])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
@pytest.mark.parametrize("overwrite_x", [False, True])
def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype,
                               norm, overwrite_x):
    # Test the identity f^-1(f(x)) == x
    x = np.random.random(shape).astype(dtype)
    x_orig = x.copy()

    if axes is not None:
        shape = np.take(shape, axes)

    # BUG FIX: ``overwrite_x`` was parametrized but never forwarded to the
    # transforms, so the True cases were identical to the False cases.
    y = forward(x, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    if overwrite_x:
        # The round trip must recover the original data regardless of
        # which intermediate buffers were overwritten.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
    else:
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)


@pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn'])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward'])
def test_fftpack_equivalence(func, type, norm):
    # scipy.fft and scipy.fftpack must agree on the forward transforms.
    # (Renamed from the misspelled ``test_fftpack_equivalience``.)
    x = np.random.rand(8, 16)
    fft_res = getattr(fft, func)(x, type, norm=norm)
    fftpack_res = getattr(fftpack, func)(x, type, norm=norm)

    assert_allclose(fft_res, fftpack_res)
# Dataset-join artifact: repository name and file paths, not code.
# (Note: the second path below does not match the following content,
# which is scipy/interpolate/fitpack2.py.)
# e-q/scipy
# scipy/fft/tests/test_real_transforms.py
# scipy/sparse/linalg/tests/test_matfuncs.py
"""
fitpack --- curve and surface fitting with splines

fitpack is based on a collection of Fortran routines DIERCKX
by P. Dierckx (see http://www.netlib.org/dierckx/) transformed
to double routines by Pearu Peterson.
"""
# Created by Pearu Peterson, June,August 2003

__all__ = [
    'UnivariateSpline',
    'InterpolatedUnivariateSpline',
    'LSQUnivariateSpline',
    'BivariateSpline',
    'LSQBivariateSpline',
    'SmoothBivariateSpline',
    'LSQSphereBivariateSpline',
    'SmoothSphereBivariateSpline',
    'RectBivariateSpline',
    'RectSphereBivariateSpline']

import warnings

from numpy import zeros, concatenate, ravel, diff, array, ones
import numpy as np

from . import fitpack
from . import dfitpack

# Integer dtype used by the wrapped DIERCKX Fortran routines.
dfitpack_int = dfitpack.types.intvar.dtype


# ############### Univariate spline ####################

# Warning texts keyed by the `ier` status code returned by the Fortran
# curfit routines; emitted via warnings.warn when a fit did not converge
# cleanly.  (NOTE(review): the original line breaks inside these strings
# were lost in this copy; re-broken here for readability.)
_curfit_messages = {1: """
The required storage space exceeds the available storage space, as
specified by the parameter nest: nest too small. If nest is already
large (say nest > m/2), it may also indicate that s is too small.
The approximation returned is the weighted least-squares spline
according to the knots t[0],t[1],...,t[n-1]. (n=nest) the parameter fp
gives the corresponding weighted sum of squared residuals (fp>s).
""",
                    2: """
A theoretically impossible result was found during the iteration
process for finding a smoothing spline with fp = s: s too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
                    3: """
The maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached: s
too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
                    10: """
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[0]<x[1]<...<x[m-1]<=xe, w[i]>0, i=0..m-1
if iopt=-1:
  xb<t[k+1]<t[k+2]<...<t[n-k-2]<xe"""
                    }


# UnivariateSpline, ext parameter can be an int or a string
_extrap_modes = {0: 0, 'extrapolate': 0,
                 1: 1, 'zeros': 1,
                 2: 2, 'raise': 2,
                 3: 3, 'const': 3}


class UnivariateSpline(object):
    """
    1-D smoothing spline fit to a given set of data points.

    Fits a spline y = spl(x) of degree `k` to the provided `x`, `y` data.
    `s` specifies the number of knots by specifying a smoothing condition.

    Parameters
    ----------
    x : (N,) array_like
        1-D array of independent input data. Must be increasing;
        must be strictly increasing if `s` is 0.
    y : (N,) array_like
        1-D array of dependent input data, of the same length as `x`.
    w : (N,) array_like, optional
        Weights for spline fitting.  Must be positive.  If None (default),
        weights are all equal.
    bbox : (2,) array_like, optional
        2-sequence specifying the boundary of the approximation interval. If
        None (default), ``bbox=[x[0], x[-1]]``.
    k : int, optional
        Degree of the smoothing spline.  Must be 1 <= `k` <= 5.
        Default is `k` = 3, a cubic spline.
    s : float or None, optional
        Positive smoothing factor used to choose the number of knots.  Number
        of knots will be increased until the smoothing condition is
        satisfied::

            sum((w[i] * (y[i]-spl(x[i])))**2, axis=0) <= s

        If None (default), ``s = len(w)`` which should be a good value if
        ``1/w[i]`` is an estimate of the standard deviation of ``y[i]``.
        If 0, spline will interpolate through all data points.
    ext : int or str, optional
        Controls the extrapolation mode for elements
        not in the interval defined by the knot sequence.

        * if ext=0 or 'extrapolate', return the extrapolated value.
        * if ext=1 or 'zeros', return 0
        * if ext=2 or 'raise', raise a ValueError
        * if ext=3 of 'const', return the boundary value.

        The default value is 0.
    check_finite : bool, optional
        Whether to check that the input arrays contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination or non-sensical results) if the inputs
        do contain infinities or NaNs.
        Default is False.

    Notes
    -----
    The number of data points must be larger than the spline degree `k`.

    If the input arrays contain ``nan`` values, the underlying fitting
    routines cannot cope; use zero weights for not-a-number data points
    as a workaround (the replaced y-value itself does not matter).
    """

    def __init__(self, x, y, w=None, bbox=[None]*2, k=3, s=None,
                 ext=0, check_finite=False):
        # NOTE(review): `bbox` has a mutable default list; it is only read
        # here (never mutated), so sharing across calls is harmless.

        x, y, w, bbox, self.ext = self.validate_input(x, y, w, bbox, k, s, ext,
                                                      check_finite)

        # _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        data = dfitpack.fpcurf0(x, y, k, w=w, xb=bbox[0],
                                xe=bbox[1], s=s)
        if data[-1] == 1:
            # nest too small, setting to maximum bound
            data = self._reset_nest(data)
        self._data = data
        self._reset_class()

    @staticmethod
    def validate_input(x, y, w, bbox, k, s, ext, check_finite):
        """Validate constructor arguments; return arrays as ndarrays and
        `ext` mapped to its integer code.  Raises ValueError on any
        inconsistency."""
        x, y, bbox = np.asarray(x), np.asarray(y), np.asarray(bbox)
        if w is not None:
            w = np.asarray(w)
        if check_finite:
            w_finite = np.isfinite(w).all() if w is not None else True
            if (not np.isfinite(x).all() or not np.isfinite(y).all() or
                    not w_finite):
                raise ValueError("x and y array must not contain "
                                 "NaNs or infs.")
        if s is None or s > 0:
            # smoothing fit tolerates ties in x ...
            if not np.all(diff(x) >= 0.0):
                raise ValueError("x must be increasing if s > 0")
        else:
            # ... but an interpolating fit (s == 0) does not
            if not np.all(diff(x) > 0.0):
                raise ValueError("x must be strictly increasing if s = 0")

        if x.size != y.size:
            raise ValueError("x and y should have a same length")
        elif w is not None and not x.size == y.size == w.size:
            raise ValueError("x, y, and w should have a same length")
        elif bbox.shape != (2,):
            raise ValueError("bbox shape should be (2,)")
        elif not (1 <= k <= 5):
            raise ValueError("k should be 1 <= k <= 5")
        elif s is not None and not s >= 0.0:
            raise ValueError("s should be s >= 0.0")

        try:
            ext = _extrap_modes[ext]
        except KeyError as e:
            raise ValueError("Unknown extrapolation mode %s." % ext) from e

        return x, y, w, bbox, ext

    @classmethod
    def _from_tck(cls, tck, ext=0):
        """Construct a spline object from given tck"""
        self = cls.__new__(cls)
        t, c, k = tck
        self._eval_args = tck
        # _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        self._data = (None, None, None, None, None, k, None, len(t), t,
                      c, None, None, None, None)
        self.ext = ext
        return self

    def _reset_class(self):
        # Re-classify self based on the fit outcome code `ier` and cache
        # the (t, c, k) evaluation tuple.
        data = self._data
        n, t, c, k, ier = data[7], data[8], data[9], data[5], data[-1]
        self._eval_args = t[:n], c[:n], k
        if ier == 0:
            # the spline returned has a residual sum of squares fp
            # such that abs(fp-s)/s <= tol with tol a relative
            # tolerance set to 0.001 by the program
            pass
        elif ier == -1:
            # the spline returned is an interpolating spline
            self._set_class(InterpolatedUnivariateSpline)
        elif ier == -2:
            # the spline returned is the weighted least-squares
            # polynomial of degree k. In this extreme case fp gives
            # the upper bound fp0 for the smoothing factor s.
            self._set_class(LSQUnivariateSpline)
        else:
            # error
            if ier == 1:
                self._set_class(LSQUnivariateSpline)
            message = _curfit_messages.get(ier, 'ier=%s' % (ier))
            warnings.warn(message)

    def _set_class(self, cls):
        self._spline_class = cls
        if self.__class__ in (UnivariateSpline, InterpolatedUnivariateSpline,
                              LSQUnivariateSpline):
            self.__class__ = cls
        else:
            # It's an unknown subclass -- don't change class. cf. #731
            pass

    def _reset_nest(self, data, nest=None):
        # Grow the workspace arrays to `nest` (max bound m+k+1 by default)
        # and re-run the Fortran fit.
        n = data[10]
        if nest is None:
            k, m = data[5], len(data[0])
            nest = m+k+1  # this is the maximum bound for nest
        else:
            if not n <= nest:
                raise ValueError("`nest` can only be increased")
        t, c, fpint, nrdata = [np.resize(data[j], nest) for j in
                               [8, 9, 11, 12]]

        args = data[:8] + (t, c, n, fpint, nrdata, data[13])
        data = dfitpack.fpcurf1(*args)
        return data

    def set_smoothing_factor(self, s):
        """ Continue spline computation with the given smoothing
        factor s and with the knots found at the last call.

        This routine modifies the spline in place.
""" data = self._data if data[6] == -1: warnings.warn('smoothing factor unchanged for' 'LSQ spline with fixed knots') return args = data[:6] + (s,) + data[7:] data = dfitpack.fpcurf1(*args) if data[-1] == 1: # nest too small, setting to maximum bound data = self._reset_nest(data) self._data = data self._reset_class() def __call__(self, x, nu=0, ext=None): """ Evaluate spline (or its nu-th derivative) at positions x. Parameters ---------- x : array_like A 1-D array of points at which to return the value of the smoothed spline or its derivatives. Note: `x` can be unordered but the evaluation is more efficient if `x` is (partially) ordered. nu : int The order of derivative of the spline to compute. ext : int Controls the value returned for elements of `x` not in the interval defined by the knot sequence. * if ext=0 or 'extrapolate', return the extrapolated value. * if ext=1 or 'zeros', return 0 * if ext=2 or 'raise', raise a ValueError * if ext=3 or 'const', return the boundary value. The default value is 0, passed from the initialization of UnivariateSpline. """ x = np.asarray(x) # empty input yields empty output if x.size == 0: return array([]) if ext is None: ext = self.ext else: try: ext = _extrap_modes[ext] except KeyError as e: raise ValueError("Unknown extrapolation mode %s." % ext) from e return fitpack.splev(x, self._eval_args, der=nu, ext=ext) def get_knots(self): """ Return positions of interior knots of the spline. Internally, the knot vector contains ``2*k`` additional boundary knots. """ data = self._data k, n = data[5], data[7] return data[8][k:n-k] def get_coeffs(self): """Return spline coefficients.""" data = self._data k, n = data[5], data[7] return data[9][:n-k-1] def get_residual(self): """Return weighted sum of squared residuals of the spline approximation. This is equivalent to:: sum((w[i] * (y[i]-spl(x[i])))**2, axis=0) """ return self._data[10] def integral(self, a, b): """ Return definite integral of the spline between two given points. 
Parameters ---------- a : float Lower limit of integration. b : float Upper limit of integration. Returns ------- integral : float The value of the definite integral of the spline between limits. Examples -------- >>> from scipy.interpolate import UnivariateSpline >>> x = np.linspace(0, 3, 11) >>> y = x**2 >>> spl = UnivariateSpline(x, y) >>> spl.integral(0, 3) 9.0 which agrees with :math:`\\int x^2 dx = x^3 / 3` between the limits of 0 and 3. A caveat is that this routine assumes the spline to be zero outside of the data limits: >>> spl.integral(-1, 4) 9.0 >>> spl.integral(-1, 0) 0.0 """ return dfitpack.splint(*(self._eval_args+(a, b))) def derivatives(self, x): """ Return all derivatives of the spline at the point x. Parameters ---------- x : float The point to evaluate the derivatives at. Returns ------- der : ndarray, shape(k+1,) Derivatives of the orders 0 to k. Examples -------- >>> from scipy.interpolate import UnivariateSpline >>> x = np.linspace(0, 3, 11) >>> y = x**2 >>> spl = UnivariateSpline(x, y) >>> spl.derivatives(1.5) array([2.25, 3.0, 2.0, 0]) """ d, ier = dfitpack.spalde(*(self._eval_args+(x,))) if not ier == 0: raise ValueError("Error code returned by spalde: %s" % ier) return d def roots(self): """ Return the zeros of the spline. Restriction: only cubic splines are supported by fitpack. """ k = self._data[5] if k == 3: z, m, ier = dfitpack.sproot(*self._eval_args[:2]) if not ier == 0: raise ValueError("Error code returned by spalde: %s" % ier) return z[:m] raise NotImplementedError('finding roots unsupported for ' 'non-cubic splines') def derivative(self, n=1): """ Construct a new spline representing the derivative of this spline. Parameters ---------- n : int, optional Order of derivative to evaluate. Default: 1 Returns ------- spline : UnivariateSpline Spline of order k2=k-n representing the derivative of this spline. See Also -------- splder, antiderivative Notes ----- .. 
versionadded:: 0.13.0 Examples -------- This can be used for finding maxima of a curve: >>> from scipy.interpolate import UnivariateSpline >>> x = np.linspace(0, 10, 70) >>> y = np.sin(x) >>> spl = UnivariateSpline(x, y, k=4, s=0) Now, differentiate the spline and find the zeros of the derivative. (NB: `sproot` only works for order 3 splines, so we fit an order 4 spline): >>> spl.derivative().roots() / np.pi array([ 0.50000001, 1.5 , 2.49999998]) This agrees well with roots :math:`\\pi/2 + n\\pi` of :math:`\\cos(x) = \\sin'(x)`. """ tck = fitpack.splder(self._eval_args, n) # if self.ext is 'const', derivative.ext will be 'zeros' ext = 1 if self.ext == 3 else self.ext return UnivariateSpline._from_tck(tck, ext=ext) def antiderivative(self, n=1): """ Construct a new spline representing the antiderivative of this spline. Parameters ---------- n : int, optional Order of antiderivative to evaluate. Default: 1 Returns ------- spline : UnivariateSpline Spline of order k2=k+n representing the antiderivative of this spline. Notes ----- .. 
versionadded:: 0.13.0 See Also -------- splantider, derivative Examples -------- >>> from scipy.interpolate import UnivariateSpline >>> x = np.linspace(0, np.pi/2, 70) >>> y = 1 / np.sqrt(1 - 0.8*np.sin(x)**2) >>> spl = UnivariateSpline(x, y, s=0) The derivative is the inverse operation of the antiderivative, although some floating point error accumulates: >>> spl(1.7), spl.antiderivative().derivative()(1.7) (array(2.1565429877197317), array(2.1565429877201865)) Antiderivative can be used to evaluate definite integrals: >>> ispl = spl.antiderivative() >>> ispl(np.pi/2) - ispl(0) 2.2572053588768486 This is indeed an approximation to the complete elliptic integral :math:`K(m) = \\int_0^{\\pi/2} [1 - m\\sin^2 x]^{-1/2} dx`: >>> from scipy.special import ellipk >>> ellipk(0.8) 2.2572053268208538 """ tck = fitpack.splantider(self._eval_args, n) return UnivariateSpline._from_tck(tck, self.ext) class InterpolatedUnivariateSpline(UnivariateSpline): """ 1-D interpolating spline for a given set of data points. Fits a spline y = spl(x) of degree `k` to the provided `x`, `y` data. Spline function passes through all provided points. Equivalent to `UnivariateSpline` with s=0. Parameters ---------- x : (N,) array_like Input dimension of data points -- must be strictly increasing y : (N,) array_like input dimension of data points w : (N,) array_like, optional Weights for spline fitting. Must be positive. If None (default), weights are all equal. bbox : (2,) array_like, optional 2-sequence specifying the boundary of the approximation interval. If None (default), ``bbox=[x[0], x[-1]]``. k : int, optional Degree of the smoothing spline. Must be 1 <= `k` <= 5. ext : int or str, optional Controls the extrapolation mode for elements not in the interval defined by the knot sequence. * if ext=0 or 'extrapolate', return the extrapolated value. * if ext=1 or 'zeros', return 0 * if ext=2 or 'raise', raise a ValueError * if ext=3 of 'const', return the boundary value. The default value is 0. 
    check_finite : bool, optional
        Whether to check that the input arrays contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination or non-sensical results) if the inputs
        do contain infinities or NaNs.
        Default is False.

    Notes
    -----
    The number of data points must be larger than the spline degree `k`.
    The residual of the fit is exactly zero: ``spl(x)`` interpolates `y`.
    """

    def __init__(self, x, y, w=None, bbox=[None]*2, k=3,
                 ext=0, check_finite=False):
        # s is forced to None in validation (interpolating fit uses s=0).
        x, y, w, bbox, self.ext = self.validate_input(x, y, w, bbox, k, None,
                                                      ext, check_finite)
        if not np.all(diff(x) > 0.0):
            raise ValueError('x must be strictly increasing')

        # _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        self._data = dfitpack.fpcurf0(x, y, k, w=w, xb=bbox[0],
                                      xe=bbox[1], s=0)
        self._reset_class()


# Runtime error text for knot vectors rejected by the Fortran fpchec check.
_fpchec_error_string = """The input parameters have been rejected by fpchec. \
This means that at least one of the following conditions is violated:

1) k+1 <= n-k-1 <= m
2) t(1) <= t(2) <= ... <= t(k+1)
   t(n-k) <= t(n-k+1) <= ... <= t(n)
3) t(k+1) < t(k+2) < ... < t(n-k)
4) t(k+1) <= x(i) <= t(n-k)
5) The conditions specified by Schoenberg and Whitney must hold
   for at least one subset of data points, i.e., there must be a
   subset of data points y(j) such that
       t(j) < y(j) < t(j+k+1), j=1,2,...,n-k-1
"""


class LSQUnivariateSpline(UnivariateSpline):
    """
    1-D spline with explicit internal knots.

    Fits a spline y = spl(x) of degree `k` to the provided `x`, `y` data.
    `t` specifies the internal knots of the spline.

    Parameters
    ----------
    x : (N,) array_like
        Input dimension of data points -- must be increasing
    y : (N,) array_like
        Input dimension of data points
    t : (M,) array_like
        interior knots of the spline.  Must be in ascending order and::

            bbox[0] < t[0] < ... < t[-1] < bbox[-1]

    w : (N,) array_like, optional
        weights for spline fitting.  Must be positive.  If None (default),
        weights are all equal.
    bbox : (2,) array_like, optional
        2-sequence specifying the boundary of the approximation interval. If
        None (default), ``bbox = [x[0], x[-1]]``.
    k : int, optional
        Degree of the smoothing spline.  Must be 1 <= `k` <= 5.
        Default is `k` = 3, a cubic spline.
    ext : int or str, optional
        Controls the extrapolation mode for elements
        not in the interval defined by the knot sequence.

        * if ext=0 or 'extrapolate', return the extrapolated value.
        * if ext=1 or 'zeros', return 0
        * if ext=2 or 'raise', raise a ValueError
        * if ext=3 of 'const', return the boundary value.

        The default value is 0.
    check_finite : bool, optional
        Whether to check that the input arrays contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination or non-sensical results) if the inputs
        do contain infinities or NaNs.
        Default is False.

    Raises
    ------
    ValueError
        If the interior knots do not satisfy the Schoenberg-Whitney
        conditions.

    Notes
    -----
    The number of data points must be larger than the spline degree `k`.
    Knots `t` must satisfy the Schoenberg-Whitney conditions, i.e., there
    must be a subset of data points ``x[j]`` such that
    ``t[j] < x[j] < t[j+k+1]``, for ``j=0, 1,...,n-k-2``.
    """

    def __init__(self, x, y, t, w=None, bbox=[None]*2, k=3,
                 ext=0, check_finite=False):
        x, y, w, bbox, self.ext = self.validate_input(x, y, w, bbox, k, None,
                                                      ext, check_finite)
        if not np.all(diff(x) >= 0.0):
            raise ValueError('x must be increasing')

        # _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        xb = bbox[0]
        xe = bbox[1]
        if xb is None:
            xb = x[0]
        if xe is None:
            xe = x[-1]
        # Augment the interior knots with the (k+1)-fold boundary knots the
        # Fortran routine expects.
        t = concatenate(([xb]*(k+1), t, [xe]*(k+1)))
        n = len(t)
        if not np.all(t[k+1:n-k]-t[k:n-k-1] > 0, axis=0):
            raise ValueError('Interior knots t must satisfy '
                             'Schoenberg-Whitney conditions')
        if not dfitpack.fpchec(x, t, k) == 0:
            raise ValueError(_fpchec_error_string)
        data = dfitpack.fpcurfm1(x, y, k, t, w=w, xb=xb, xe=xe)
        # fpcurfm1 returns fewer fields than fpcurf0; pad to the common
        # _data layout.
        self._data = data[:-3] + (None, None, data[-1])
        self._reset_class()


# ############### Bivariate spline ####################

class _BivariateSplineBase(object):
    """
    Base class for Bivariate spline s(x,y) interpolation on the rectangle
    [xb,xe] x [yb, ye] calculated from a given set of data points (x,y,z).
    """

    def get_residual(self):
        """ Return weighted sum of squared residuals of the spline
        approximation: sum ((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0)
        """
        return self.fp

    def get_knots(self):
        """ Return a tuple (tx,ty) where tx,ty contain knots positions
        of the spline with respect to x-, y-variable, respectively.
        The position of interior and additional knots are given as
        t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
        """
        return self.tck[:2]

    def get_coeffs(self):
        """ Return spline coefficients."""
        return self.tck[2]

    def __call__(self, x, y, dx=0, dy=0, grid=True):
        """
        Evaluate the spline or its derivatives at given positions.

        Parameters
        ----------
        x, y : array_like
            Input coordinates.

            If `grid` is False, evaluate the spline at points
            ``(x[i], y[i]), i=0, ..., len(x)-1``.  Standard Numpy
            broadcasting is obeyed.

            If `grid` is True: evaluate spline at the grid points
            defined by the coordinate arrays x, y. The arrays must be
            sorted to increasing order.
            Note that the axis ordering is inverted relative to
            the output of meshgrid.
        dx : int
            Order of x-derivative

            .. versionadded:: 0.14.0
        dy : int
            Order of y-derivative

            .. versionadded:: 0.14.0
        grid : bool
            Whether to evaluate the results on a grid spanned by the
            input arrays, or at points specified by the input arrays.

            .. versionadded:: 0.14.0
        """
        x = np.asarray(x)
        y = np.asarray(y)

        tx, ty, c = self.tck[:3]
        kx, ky = self.degrees
        if grid:
            # empty grid in either direction yields an empty result
            if x.size == 0 or y.size == 0:
                return np.zeros((x.size, y.size), dtype=self.tck[2].dtype)

            if dx or dy:
                z, ier = dfitpack.parder(tx, ty, c, kx, ky, dx, dy, x, y)
                if not ier == 0:
                    raise ValueError("Error code returned by parder: %s" % ier)
            else:
                z, ier = dfitpack.bispev(tx, ty, c, kx, ky, x, y)
                if not ier == 0:
                    raise ValueError("Error code returned by bispev: %s" % ier)
        else:
            # standard Numpy broadcasting
            if x.shape != y.shape:
                x, y = np.broadcast_arrays(x, y)

            shape = x.shape
            x = x.ravel()
            y = y.ravel()

            if x.size == 0 or y.size == 0:
                return np.zeros(shape, dtype=self.tck[2].dtype)

            if dx or dy:
                z, ier = dfitpack.pardeu(tx, ty, c, kx, ky, dx, dy, x, y)
                if not ier == 0:
                    raise ValueError("Error code returned by pardeu: %s" % ier)
            else:
                z, ier = dfitpack.bispeu(tx, ty, c, kx, ky, x, y)
                if not ier == 0:
                    raise ValueError("Error code returned by bispeu: %s" % ier)

            z = z.reshape(shape)
        return z


# Warning texts keyed by the `ier` status code of the Fortran surfit
# routines.  (NOTE(review): original in-string line breaks were lost in
# this copy; re-broken for readability.)
_surfit_messages = {1: """
The required storage space exceeds the available storage space: nxest
or nyest too small, or s too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
                    2: """
A theoretically impossible result was found during the iteration
process for finding a smoothing spline with fp = s: s too small or
badly chosen eps.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
                    3: """
the maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached:
s too small.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
                    4: """
No more knots can be added because the number of b-spline coefficients
(nx-kx-1)*(ny-ky-1) already exceeds the number of data points m:
either s or m too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
                    5: """
No more knots can be added because the additional knot would (quasi)
coincide with an old one: s too small or too large a weight to an
inaccurate data point.
The weighted least-squares spline corresponds to the current set of
knots.""",
                    10: """
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[i]<=xe, yb<=y[i]<=ye, w[i]>0, i=0..m-1
If iopt==-1, then
  xb<tx[kx+1]<tx[kx+2]<...<tx[nx-kx-2]<xe
  yb<ty[ky+1]<ty[ky+2]<...<ty[ny-ky-2]<ye""",
                    -3: """
The coefficients of the spline returned have been computed as the
minimal norm least-squares solution of a (numerically) rank deficient
system (deficiency=%i). If deficiency is large, the results may be
inaccurate. Deficiency may strongly depend on the value of eps."""
                    }


class BivariateSpline(_BivariateSplineBase):
    """
    Base class for bivariate splines.

    This describes a spline ``s(x, y)`` of degrees ``kx`` and ``ky`` on
    the rectangle ``[xb, xe] * [yb, ye]`` calculated from a given set
    of data points ``(x, y, z)``.

    This class is meant to be subclassed, not instantiated directly.
    To construct these splines, call either `SmoothBivariateSpline` or
    `LSQBivariateSpline` or `RectBivariateSpline`.
    """

    @classmethod
    def _from_tck(cls, tck):
        """Construct a spline object from given tck and degree"""
        self = cls.__new__(cls)
        if len(tck) != 5:
            raise ValueError("tck should be a 5 element tuple of tx,"
                             " ty, c, kx, ky")
        self.tck = tck[:3]
        self.degrees = tck[3:]
        return self

    def ev(self, xi, yi, dx=0, dy=0):
        """
        Evaluate the spline at points

        Returns the interpolated value at ``(xi[i], yi[i]),
        i=0,...,len(xi)-1``.

        Parameters
        ----------
        xi, yi : array_like
            Input coordinates. Standard Numpy broadcasting is obeyed.
        dx : int, optional
            Order of x-derivative

            .. versionadded:: 0.14.0
        dy : int, optional
            Order of y-derivative

            .. versionadded:: 0.14.0
        """
        return self.__call__(xi, yi, dx=dx, dy=dy, grid=False)

    def integral(self, xa, xb, ya, yb):
        """
        Evaluate the integral of the spline over area [xa,xb] x [ya,yb].

        Parameters
        ----------
        xa, xb : float
            The end-points of the x integration interval.
        ya, yb : float
            The end-points of the y integration interval.

        Returns
        -------
        integ : float
            The value of the resulting integral.
        """
        tx, ty, c = self.tck[:3]
        kx, ky = self.degrees
        return dfitpack.dblint(tx, ty, c, kx, ky, xa, xb, ya, yb)

    @staticmethod
    def _validate_input(x, y, z, w, kx, ky, eps):
        # Shared argument validation for the scattered-data constructors.
        x, y, z = np.asarray(x), np.asarray(y), np.asarray(z)
        if not x.size == y.size == z.size:
            raise ValueError('x, y, and z should have a same length')

        if w is not None:
            w = np.asarray(w)
            if x.size != w.size:
                raise ValueError('x, y, z, and w should have a same length')
            elif not np.all(w >= 0.0):
                raise ValueError('w should be positive')
        if (eps is not None) and (not 0.0 < eps < 1.0):
            raise ValueError('eps should be between (0, 1)')
        if not x.size >= (kx + 1) * (ky + 1):
            raise ValueError('The length of x, y and z should be at least'
                             ' (kx+1) * (ky+1)')
        return x, y, z, w


class SmoothBivariateSpline(BivariateSpline):
    """
    Smooth bivariate spline approximation.

    Parameters
    ----------
    x, y, z : array_like
        1-D sequences of data points (order is not important).
    w : array_like, optional
        Positive 1-D sequence of weights, of same length as `x`, `y` and `z`.
    bbox : array_like, optional
        Sequence of length 4 specifying the boundary of the rectangular
        approximation domain.  By default,
        ``bbox=[min(x), max(x), min(y), max(y)]``.
    kx, ky : ints, optional
        Degrees of the bivariate spline. Default is 3.
    s : float, optional
        Positive smoothing factor defined for estimation condition:
        ``sum((w[i]*(z[i]-s(x[i], y[i])))**2, axis=0) <= s``
        Default ``s=len(w)`` which should be a good value if ``1/w[i]`` is an
        estimate of the standard deviation of ``z[i]``.
    eps : float, optional
        A threshold for determining the effective rank of an over-determined
        linear system of equations. `eps` should have a value within the open
        interval ``(0, 1)``, the default is 1e-16.

    See Also
    --------
    BivariateSpline :
        a base class for bivariate splines.
    UnivariateSpline :
        a smooth univariate spline to fit a given set of data points.
    LSQBivariateSpline :
        a bivariate spline using weighted least-squares fitting
    RectSphereBivariateSpline :
        a bivariate spline over a rectangular mesh on a sphere
    SmoothSphereBivariateSpline :
        a smoothing bivariate spline in spherical coordinates
    LSQSphereBivariateSpline :
        a bivariate spline in spherical coordinates using weighted
        least-squares fitting
    RectBivariateSpline :
        a bivariate spline over a rectangular mesh
    bisplrep :
        a function to find a bivariate B-spline representation of a surface
    bisplev :
        a function to evaluate a bivariate B-spline and its derivatives

    Notes
    -----
    The length of `x`, `y` and `z` should be at least ``(kx+1) * (ky+1)``.
    """

    def __init__(self, x, y, z, w=None, bbox=[None] * 4, kx=3, ky=3, s=None,
                 eps=1e-16):
        x, y, z, w = self._validate_input(x, y, z, w, kx, ky, eps)
        bbox = ravel(bbox)
        if not bbox.shape == (4,):
            raise ValueError('bbox shape should be (4,)')
        if s is not None and not s >= 0.0:
            raise ValueError("s should be s >= 0.0")

        xb, xe, yb, ye = bbox
        # First call with a minimal second workspace; if ier > 10 the
        # Fortran routine reports the required lwrk2 size and we re-run.
        nx, tx, ny, ty, c, fp, wrk1, ier = dfitpack.surfit_smth(x, y, z, w,
                                                                xb, xe, yb,
                                                                ye, kx, ky,
                                                                s=s, eps=eps,
                                                                lwrk2=1)
        if ier > 10:          # lwrk2 was to small, re-run
            nx, tx, ny, ty, c, fp, wrk1, ier = dfitpack.surfit_smth(x, y, z,
                                                                    w, xb, xe,
                                                                    yb, ye,
                                                                    kx, ky,
                                                                    s=s,
                                                                    eps=eps,
                                                                    lwrk2=ier)
        if ier in [0, -1, -2]:  # normal return
            pass
        else:
            message = _surfit_messages.get(ier, 'ier=%s' % (ier))
            warnings.warn(message)

        self.fp = fp
        self.tck = tx[:nx], ty[:ny], c[:(nx-kx-1)*(ny-ky-1)]
        self.degrees = kx, ky


class LSQBivariateSpline(BivariateSpline):
    """
    Weighted least-squares bivariate spline approximation.

    Parameters
    ----------
    x, y, z : array_like
        1-D sequences of data points (order is not important).
    tx, ty : array_like
        Strictly ordered 1-D sequences of knots coordinates.
    w : array_like, optional
        Positive 1-D array of weights, of the same length as `x`, `y` and
        `z`.
    bbox : (4,) array_like, optional
        Sequence of length 4 specifying the boundary of the rectangular
        approximation domain.  By default,
        ``bbox=[min(x,tx),max(x,tx), min(y,ty),max(y,ty)]``.
    kx, ky : ints, optional
        Degrees of the bivariate spline. Default is 3.
    eps : float, optional
        A threshold for determining the effective rank of an over-determined
        linear system of equations. `eps` should have a value within the open
        interval ``(0, 1)``, the default is 1e-16.

    Notes
    -----
    The length of `x`, `y` and `z` should be at least ``(kx+1) * (ky+1)``.
    """

    def __init__(self, x, y, z, tx, ty, w=None, bbox=[None]*4, kx=3, ky=3,
                 eps=None):
        x, y, z, w = self._validate_input(x, y, z, w, kx, ky, eps)
        bbox = ravel(bbox)
        if not bbox.shape == (4,):
            raise ValueError('bbox shape should be (4,)')

        # Interior knots are placed into zero-initialized full-length knot
        # vectors; positions outside [kx+1, nx-kx-1) are left for the
        # boundary knots.
        nx = 2*kx+2+len(tx)
        ny = 2*ky+2+len(ty)
        tx1 = zeros((nx,), float)
        ty1 = zeros((ny,), float)
        tx1[kx+1:nx-kx-1] = tx
        ty1[ky+1:ny-ky-1] = ty

        xb, xe, yb, ye = bbox
        # Same lwrk2 retry protocol as in SmoothBivariateSpline.
        tx1, ty1, c, fp, ier = dfitpack.surfit_lsq(x, y, z, tx1, ty1, w,
                                                   xb, xe, yb, ye,
                                                   kx, ky, eps, lwrk2=1)
        if ier > 10:
            tx1, ty1, c, fp, ier = dfitpack.surfit_lsq(x, y, z, tx1, ty1, w,
                                                       xb, xe, yb, ye,
                                                       kx, ky, eps,
                                                       lwrk2=ier)
        if ier in [0, -1, -2]:  # normal return
            pass
        else:
            if ier < -2:
                # Rank-deficient system; report the deficiency in the
                # warning text.
                deficiency = (nx-kx-1)*(ny-ky-1)+ier
                message = _surfit_messages.get(-3) % (deficiency)
            else:
                message = _surfit_messages.get(ier, 'ier=%s' % (ier))
            warnings.warn(message)
        self.fp = fp
        self.tck = tx1, ty1, c
        self.degrees = kx, ky


class RectBivariateSpline(BivariateSpline):
    """
    Bivariate spline approximation over a rectangular mesh.

    Can be used for both smoothing and interpolating data.

    Parameters
    ----------
    x,y : array_like
        1-D arrays of coordinates in strictly ascending order.
    z : array_like
        2-D array of data with shape (x.size,y.size).
    bbox : array_like, optional
        Sequence of length 4 specifying the boundary of the rectangular
        approximation domain.  By default,
        ``bbox=[min(x), max(x), min(y), max(y)]``.
    kx, ky : ints, optional
        Degrees of the bivariate spline. Default is 3.
    s : float, optional
        Positive smoothing factor defined for estimation condition:
        ``sum((z[i]-f(x[i], y[i]))**2, axis=0) <= s`` where f is a spline
        function. Default is ``s=0``, which is for interpolation.

    See Also
    --------
    BivariateSpline :
        a base class for bivariate splines.
    UnivariateSpline :
        a smooth univariate spline to fit a given set of data points.
SmoothBivariateSpline : a smoothing bivariate spline through the given points LSQBivariateSpline : a bivariate spline using weighted least-squares fitting RectSphereBivariateSpline : a bivariate spline over a rectangular mesh on a sphere SmoothSphereBivariateSpline : a smoothing bivariate spline in spherical coordinates LSQSphereBivariateSpline : a bivariate spline in spherical coordinates using weighted least-squares fitting bisplrep : a function to find a bivariate B-spline representation of a surface bisplev : a function to evaluate a bivariate B-spline and its derivatives """ def __init__(self, x, y, z, bbox=[None] * 4, kx=3, ky=3, s=0): x, y, bbox = ravel(x), ravel(y), ravel(bbox) z = np.asarray(z) if not np.all(diff(x) > 0.0): raise ValueError('x must be strictly increasing') if not np.all(diff(y) > 0.0): raise ValueError('y must be strictly increasing') if not x.size == z.shape[0]: raise ValueError('x dimension of z must have same number of ' 'elements as x') if not y.size == z.shape[1]: raise ValueError('y dimension of z must have same number of ' 'elements as y') if not bbox.shape == (4,): raise ValueError('bbox shape should be (4,)') if s is not None and not s >= 0.0: raise ValueError("s should be s >= 0.0") z = ravel(z) xb, xe, yb, ye = bbox nx, tx, ny, ty, c, fp, ier = dfitpack.regrid_smth(x, y, z, xb, xe, yb, ye, kx, ky, s) if ier not in [0, -1, -2]: msg = _surfit_messages.get(ier, 'ier=%s' % (ier)) raise ValueError(msg) self.fp = fp self.tck = tx[:nx], ty[:ny], c[:(nx - kx - 1) * (ny - ky - 1)] self.degrees = kx, ky _spherefit_messages = _surfit_messages.copy() _spherefit_messages[10] = """ ERROR. On entry, the input data are controlled on validity. 
The following restrictions must be satisfied: -1<=iopt<=1, m>=2, ntest>=8 ,npest >=8, 0<eps<1, 0<=teta(i)<=pi, 0<=phi(i)<=2*pi, w(i)>0, i=1,...,m lwrk1 >= 185+52*v+10*u+14*u*v+8*(u-1)*v**2+8*m kwrk >= m+(ntest-7)*(npest-7) if iopt=-1: 8<=nt<=ntest , 9<=np<=npest 0<tt(5)<tt(6)<...<tt(nt-4)<pi 0<tp(5)<tp(6)<...<tp(np-4)<2*pi if iopt>=0: s>=0 if one of these conditions is found to be violated,control is immediately repassed to the calling program. in that case there is no approximation returned.""" _spherefit_messages[-3] = """ WARNING. The coefficients of the spline returned have been computed as the minimal norm least-squares solution of a (numerically) rank deficient system (deficiency=%i, rank=%i). Especially if the rank deficiency, which is computed by 6+(nt-8)*(np-7)+ier, is large, the results may be inaccurate. They could also seriously depend on the value of eps.""" class SphereBivariateSpline(_BivariateSplineBase): """ Bivariate spline s(x,y) of degrees 3 on a sphere, calculated from a given set of data points (theta,phi,r). .. versionadded:: 0.11.0 See Also -------- bisplrep : a function to find a bivariate B-spline representation of a surface bisplev : a function to evaluate a bivariate B-spline and its derivatives UnivariateSpline : a smooth univariate spline to fit a given set of data points. SmoothUnivariateSpline : a smooth univariate spline through the given points LSQUnivariateSpline : a univariate spline using weighted least-squares fitting """ def __call__(self, theta, phi, dtheta=0, dphi=0, grid=True): """ Evaluate the spline or its derivatives at given positions. Parameters ---------- theta, phi : array_like Input coordinates. If `grid` is False, evaluate the spline at points ``(theta[i], phi[i]), i=0, ..., len(x)-1``. Standard Numpy broadcasting is obeyed. If `grid` is True: evaluate spline at the grid points defined by the coordinate arrays theta, phi. The arrays must be sorted to increasing order. 
dtheta : int, optional Order of theta-derivative .. versionadded:: 0.14.0 dphi : int Order of phi-derivative .. versionadded:: 0.14.0 grid : bool Whether to evaluate the results on a grid spanned by the input arrays, or at points specified by the input arrays. .. versionadded:: 0.14.0 """ theta = np.asarray(theta) phi = np.asarray(phi) if theta.size > 0 and (theta.min() < 0. or theta.max() > np.pi): raise ValueError("requested theta out of bounds.") if phi.size > 0 and (phi.min() < 0. or phi.max() > 2. * np.pi): raise ValueError("requested phi out of bounds.") return _BivariateSplineBase.__call__(self, theta, phi, dx=dtheta, dy=dphi, grid=grid) def ev(self, theta, phi, dtheta=0, dphi=0): """ Evaluate the spline at points Returns the interpolated value at ``(theta[i], phi[i]), i=0,...,len(theta)-1``. Parameters ---------- theta, phi : array_like Input coordinates. Standard Numpy broadcasting is obeyed. dtheta : int, optional Order of theta-derivative .. versionadded:: 0.14.0 dphi : int, optional Order of phi-derivative .. versionadded:: 0.14.0 """ return self.__call__(theta, phi, dtheta=dtheta, dphi=dphi, grid=False) class SmoothSphereBivariateSpline(SphereBivariateSpline): """ Smooth bivariate spline approximation in spherical coordinates. .. versionadded:: 0.11.0 Parameters ---------- theta, phi, r : array_like 1-D sequences of data points (order is not important). Coordinates must be given in radians. Theta must lie within the interval ``[0, pi]``, and phi must lie within the interval ``[0, 2pi]``. w : array_like, optional Positive 1-D sequence of weights. s : float, optional Positive smoothing factor defined for estimation condition: ``sum((w(i)*(r(i) - s(theta(i), phi(i))))**2, axis=0) <= s`` Default ``s=len(w)`` which should be a good value if ``1/w[i]`` is an estimate of the standard deviation of ``r[i]``. eps : float, optional A threshold for determining the effective rank of an over-determined linear system of equations. 
`eps` should have a value within the open interval ``(0, 1)``, the default is 1e-16. See Also -------- BivariateSpline : a base class for bivariate splines. UnivariateSpline : a smooth univariate spline to fit a given set of data points. SmoothBivariateSpline : a smoothing bivariate spline through the given points LSQBivariateSpline : a bivariate spline using weighted least-squares fitting RectSphereBivariateSpline : a bivariate spline over a rectangular mesh on a sphere LSQSphereBivariateSpline : a bivariate spline in spherical coordinates using weighted least-squares fitting RectBivariateSpline : a bivariate spline over a rectangular mesh. bisplrep : a function to find a bivariate B-spline representation of a surface bisplev : a function to evaluate a bivariate B-spline and its derivatives Notes ----- For more information, see the FITPACK_ site about this function. .. _FITPACK: http://www.netlib.org/dierckx/sphere.f Examples -------- Suppose we have global data on a coarse grid (the input data does not have to be on a grid): >>> theta = np.linspace(0., np.pi, 7) >>> phi = np.linspace(0., 2*np.pi, 9) >>> data = np.empty((theta.shape[0], phi.shape[0])) >>> data[:,0], data[0,:], data[-1,:] = 0., 0., 0. >>> data[1:-1,1], data[1:-1,-1] = 1., 1. >>> data[1,1:-1], data[-2,1:-1] = 1., 1. >>> data[2:-2,2], data[2:-2,-2] = 2., 2. >>> data[2,2:-2], data[-3,2:-2] = 2., 2. >>> data[3,3:-2] = 3. >>> data = np.roll(data, 4, 1) We need to set up the interpolator object >>> lats, lons = np.meshgrid(theta, phi) >>> from scipy.interpolate import SmoothSphereBivariateSpline >>> lut = SmoothSphereBivariateSpline(lats.ravel(), lons.ravel(), ... 
data.T.ravel(), s=3.5) As a first test, we'll see what the algorithm returns when run on the input coordinates >>> data_orig = lut(theta, phi) Finally we interpolate the data to a finer grid >>> fine_lats = np.linspace(0., np.pi, 70) >>> fine_lons = np.linspace(0., 2 * np.pi, 90) >>> data_smth = lut(fine_lats, fine_lons) >>> import matplotlib.pyplot as plt >>> fig = plt.figure() >>> ax1 = fig.add_subplot(131) >>> ax1.imshow(data, interpolation='nearest') >>> ax2 = fig.add_subplot(132) >>> ax2.imshow(data_orig, interpolation='nearest') >>> ax3 = fig.add_subplot(133) >>> ax3.imshow(data_smth, interpolation='nearest') >>> plt.show() """ def __init__(self, theta, phi, r, w=None, s=0., eps=1E-16): theta, phi, r = np.asarray(theta), np.asarray(phi), np.asarray(r) # input validation if not ((0.0 <= theta).all() and (theta <= np.pi).all()): raise ValueError('theta should be between [0, pi]') if not ((0.0 <= phi).all() and (phi <= 2.0 * np.pi).all()): raise ValueError('phi should be between [0, 2pi]') if w is not None: w = np.asarray(w) if not (w >= 0.0).all(): raise ValueError('w should be positive') if not s >= 0.0: raise ValueError('s should be positive') if not 0.0 < eps < 1.0: raise ValueError('eps should be between (0, 1)') if np.issubclass_(w, float): w = ones(len(theta)) * w nt_, tt_, np_, tp_, c, fp, ier = dfitpack.spherfit_smth(theta, phi, r, w=w, s=s, eps=eps) if ier not in [0, -1, -2]: message = _spherefit_messages.get(ier, 'ier=%s' % (ier)) raise ValueError(message) self.fp = fp self.tck = tt_[:nt_], tp_[:np_], c[:(nt_ - 4) * (np_ - 4)] self.degrees = (3, 3) class LSQSphereBivariateSpline(SphereBivariateSpline): """ Weighted least-squares bivariate spline approximation in spherical coordinates. Determines a smoothing bicubic spline according to a given set of knots in the `theta` and `phi` directions. .. versionadded:: 0.11.0 Parameters ---------- theta, phi, r : array_like 1-D sequences of data points (order is not important). 
Coordinates must be given in radians. Theta must lie within the interval ``[0, pi]``, and phi must lie within the interval ``[0, 2pi]``. tt, tp : array_like Strictly ordered 1-D sequences of knots coordinates. Coordinates must satisfy ``0 < tt[i] < pi``, ``0 < tp[i] < 2*pi``. w : array_like, optional Positive 1-D sequence of weights, of the same length as `theta`, `phi` and `r`. eps : float, optional A threshold for determining the effective rank of an over-determined linear system of equations. `eps` should have a value within the open interval ``(0, 1)``, the default is 1e-16. See Also -------- BivariateSpline : a base class for bivariate splines. UnivariateSpline : a smooth univariate spline to fit a given set of data points. SmoothBivariateSpline : a smoothing bivariate spline through the given points LSQBivariateSpline : a bivariate spline using weighted least-squares fitting RectSphereBivariateSpline : a bivariate spline over a rectangular mesh on a sphere SmoothSphereBivariateSpline : a smoothing bivariate spline in spherical coordinates RectBivariateSpline : a bivariate spline over a rectangular mesh. bisplrep : a function to find a bivariate B-spline representation of a surface bisplev : a function to evaluate a bivariate B-spline and its derivatives Notes ----- For more information, see the FITPACK_ site about this function. .. _FITPACK: http://www.netlib.org/dierckx/sphere.f Examples -------- Suppose we have global data on a coarse grid (the input data does not have to be on a grid): >>> from scipy.interpolate import LSQSphereBivariateSpline >>> import matplotlib.pyplot as plt >>> theta = np.linspace(0, np.pi, num=7) >>> phi = np.linspace(0, 2*np.pi, num=9) >>> data = np.empty((theta.shape[0], phi.shape[0])) >>> data[:,0], data[0,:], data[-1,:] = 0., 0., 0. >>> data[1:-1,1], data[1:-1,-1] = 1., 1. >>> data[1,1:-1], data[-2,1:-1] = 1., 1. >>> data[2:-2,2], data[2:-2,-2] = 2., 2. >>> data[2,2:-2], data[-3,2:-2] = 2., 2. >>> data[3,3:-2] = 3. 
>>> data = np.roll(data, 4, 1) We need to set up the interpolator object. Here, we must also specify the coordinates of the knots to use. >>> lats, lons = np.meshgrid(theta, phi) >>> knotst, knotsp = theta.copy(), phi.copy() >>> knotst[0] += .0001 >>> knotst[-1] -= .0001 >>> knotsp[0] += .0001 >>> knotsp[-1] -= .0001 >>> lut = LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), ... data.T.ravel(), knotst, knotsp) As a first test, we'll see what the algorithm returns when run on the input coordinates >>> data_orig = lut(theta, phi) Finally we interpolate the data to a finer grid >>> fine_lats = np.linspace(0., np.pi, 70) >>> fine_lons = np.linspace(0., 2*np.pi, 90) >>> data_lsq = lut(fine_lats, fine_lons) >>> fig = plt.figure() >>> ax1 = fig.add_subplot(131) >>> ax1.imshow(data, interpolation='nearest') >>> ax2 = fig.add_subplot(132) >>> ax2.imshow(data_orig, interpolation='nearest') >>> ax3 = fig.add_subplot(133) >>> ax3.imshow(data_lsq, interpolation='nearest') >>> plt.show() """ def __init__(self, theta, phi, r, tt, tp, w=None, eps=1E-16): theta, phi, r = np.asarray(theta), np.asarray(phi), np.asarray(r) tt, tp = np.asarray(tt), np.asarray(tp) if not ((0.0 <= theta).all() and (theta <= np.pi).all()): raise ValueError('theta should be between [0, pi]') if not ((0.0 <= phi).all() and (phi <= 2*np.pi).all()): raise ValueError('phi should be between [0, 2pi]') if not ((0.0 < tt).all() and (tt < np.pi).all()): raise ValueError('tt should be between (0, pi)') if not ((0.0 < tp).all() and (tp < 2*np.pi).all()): raise ValueError('tp should be between (0, 2pi)') if w is not None: w = np.asarray(w) if not (w >= 0.0).all(): raise ValueError('w should be positive') if not 0.0 < eps < 1.0: raise ValueError('eps should be between (0, 1)') if np.issubclass_(w, float): w = ones(len(theta)) * w nt_, np_ = 8 + len(tt), 8 + len(tp) tt_, tp_ = zeros((nt_,), float), zeros((np_,), float) tt_[4:-4], tp_[4:-4] = tt, tp tt_[-4:], tp_[-4:] = np.pi, 2. 
* np.pi tt_, tp_, c, fp, ier = dfitpack.spherfit_lsq(theta, phi, r, tt_, tp_, w=w, eps=eps) if ier > 0: message = _spherefit_messages.get(ier, 'ier=%s' % (ier)) raise ValueError(message) self.fp = fp self.tck = tt_, tp_, c self.degrees = (3, 3) _spfit_messages = _surfit_messages.copy() _spfit_messages[10] = """ ERROR: on entry, the input data are controlled on validity the following restrictions must be satisfied. -1<=iopt(1)<=1, 0<=iopt(2)<=1, 0<=iopt(3)<=1, -1<=ider(1)<=1, 0<=ider(2)<=1, ider(2)=0 if iopt(2)=0. -1<=ider(3)<=1, 0<=ider(4)<=1, ider(4)=0 if iopt(3)=0. mu >= mumin (see above), mv >= 4, nuest >=8, nvest >= 8, kwrk>=5+mu+mv+nuest+nvest, lwrk >= 12+nuest*(mv+nvest+3)+nvest*24+4*mu+8*mv+max(nuest,mv+nvest) 0< u(i-1)<u(i)< pi,i=2,..,mu, -pi<=v(1)< pi, v(1)<v(i-1)<v(i)<v(1)+2*pi, i=3,...,mv if iopt(1)=-1: 8<=nu<=min(nuest,mu+6+iopt(2)+iopt(3)) 0<tu(5)<tu(6)<...<tu(nu-4)< pi 8<=nv<=min(nvest,mv+7) v(1)<tv(5)<tv(6)<...<tv(nv-4)<v(1)+2*pi the schoenberg-whitney conditions, i.e. there must be subset of grid co-ordinates uu(p) and vv(q) such that tu(p) < uu(p) < tu(p+4) ,p=1,...,nu-4 (iopt(2)=1 and iopt(3)=1 also count for a uu-value tv(q) < vv(q) < tv(q+4) ,q=1,...,nv-4 (vv(q) is either a value v(j) or v(j)+2*pi) if iopt(1)>=0: s>=0 if s=0: nuest>=mu+6+iopt(2)+iopt(3), nvest>=mv+7 if one of these conditions is found to be violated,control is immediately repassed to the calling program. in that case there is no approximation returned.""" class RectSphereBivariateSpline(SphereBivariateSpline): """ Bivariate spline approximation over a rectangular mesh on a sphere. Can be used for smoothing data. .. versionadded:: 0.11.0 Parameters ---------- u : array_like 1-D array of colatitude coordinates in strictly ascending order. Coordinates must be given in radians and lie within the interval ``[0, pi]``. v : array_like 1-D array of longitude coordinates in strictly ascending order. Coordinates must be given in radians. 
First element (``v[0]``) must lie within the interval ``[-pi, pi)``. Last element (``v[-1]``) must satisfy ``v[-1] <= v[0] + 2*pi``. r : array_like 2-D array of data with shape ``(u.size, v.size)``. s : float, optional Positive smoothing factor defined for estimation condition (``s=0`` is for interpolation). pole_continuity : bool or (bool, bool), optional Order of continuity at the poles ``u=0`` (``pole_continuity[0]``) and ``u=pi`` (``pole_continuity[1]``). The order of continuity at the pole will be 1 or 0 when this is True or False, respectively. Defaults to False. pole_values : float or (float, float), optional Data values at the poles ``u=0`` and ``u=pi``. Either the whole parameter or each individual element can be None. Defaults to None. pole_exact : bool or (bool, bool), optional Data value exactness at the poles ``u=0`` and ``u=pi``. If True, the value is considered to be the right function value, and it will be fitted exactly. If False, the value will be considered to be a data value just like the other data values. Defaults to False. pole_flat : bool or (bool, bool), optional For the poles at ``u=0`` and ``u=pi``, specify whether or not the approximation has vanishing derivatives. Defaults to False. See Also -------- BivariateSpline : a base class for bivariate splines. UnivariateSpline : a smooth univariate spline to fit a given set of data points. SmoothBivariateSpline : a smoothing bivariate spline through the given points LSQBivariateSpline : a bivariate spline using weighted least-squares fitting SmoothSphereBivariateSpline : a smoothing bivariate spline in spherical coordinates LSQSphereBivariateSpline : a bivariate spline in spherical coordinates using weighted least-squares fitting RectBivariateSpline : a bivariate spline over a rectangular mesh. 
bisplrep : a function to find a bivariate B-spline representation of a surface bisplev : a function to evaluate a bivariate B-spline and its derivatives Notes ----- Currently, only the smoothing spline approximation (``iopt[0] = 0`` and ``iopt[0] = 1`` in the FITPACK routine) is supported. The exact least-squares spline approximation is not implemented yet. When actually performing the interpolation, the requested `v` values must lie within the same length 2pi interval that the original `v` values were chosen from. For more information, see the FITPACK_ site about this function. .. _FITPACK: http://www.netlib.org/dierckx/spgrid.f Examples -------- Suppose we have global data on a coarse grid >>> lats = np.linspace(10, 170, 9) * np.pi / 180. >>> lons = np.linspace(0, 350, 18) * np.pi / 180. >>> data = np.dot(np.atleast_2d(90. - np.linspace(-80., 80., 18)).T, ... np.atleast_2d(180. - np.abs(np.linspace(0., 350., 9)))).T We want to interpolate it to a global one-degree grid >>> new_lats = np.linspace(1, 180, 180) * np.pi / 180 >>> new_lons = np.linspace(1, 360, 360) * np.pi / 180 >>> new_lats, new_lons = np.meshgrid(new_lats, new_lons) We need to set up the interpolator object >>> from scipy.interpolate import RectSphereBivariateSpline >>> lut = RectSphereBivariateSpline(lats, lons, data) Finally we interpolate the data. The `RectSphereBivariateSpline` object only takes 1-D arrays as input, therefore we need to do some reshaping. >>> data_interp = lut.ev(new_lats.ravel(), ... new_lons.ravel()).reshape((360, 180)).T Looking at the original and the interpolated data, one can see that the interpolant reproduces the original data very well: >>> import matplotlib.pyplot as plt >>> fig = plt.figure() >>> ax1 = fig.add_subplot(211) >>> ax1.imshow(data, interpolation='nearest') >>> ax2 = fig.add_subplot(212) >>> ax2.imshow(data_interp, interpolation='nearest') >>> plt.show() Choosing the optimal value of ``s`` can be a delicate task. 
Recommended values for ``s`` depend on the accuracy of the data values. If the user has an idea of the statistical errors on the data, she can also find a proper estimate for ``s``. By assuming that, if she specifies the right ``s``, the interpolator will use a spline ``f(u,v)`` which exactly reproduces the function underlying the data, she can evaluate ``sum((r(i,j)-s(u(i),v(j)))**2)`` to find a good estimate for this ``s``. For example, if she knows that the statistical errors on her ``r(i,j)``-values are not greater than 0.1, she may expect that a good ``s`` should have a value not larger than ``u.size * v.size * (0.1)**2``. If nothing is known about the statistical error in ``r(i,j)``, ``s`` must be determined by trial and error. The best is then to start with a very large value of ``s`` (to determine the least-squares polynomial and the corresponding upper bound ``fp0`` for ``s``) and then to progressively decrease the value of ``s`` (say by a factor 10 in the beginning, i.e. ``s = fp0 / 10, fp0 / 100, ...`` and more carefully as the approximation shows more detail) to obtain closer fits. The interpolation results for different values of ``s`` give some insight into this process: >>> fig2 = plt.figure() >>> s = [3e9, 2e9, 1e9, 1e8] >>> for ii in range(len(s)): ... lut = RectSphereBivariateSpline(lats, lons, data, s=s[ii]) ... data_interp = lut.ev(new_lats.ravel(), ... new_lons.ravel()).reshape((360, 180)).T ... ax = fig2.add_subplot(2, 2, ii+1) ... ax.imshow(data_interp, interpolation='nearest') ... 
ax.set_title("s = %g" % s[ii]) >>> plt.show() """ def __init__(self, u, v, r, s=0., pole_continuity=False, pole_values=None, pole_exact=False, pole_flat=False): iopt = np.array([0, 0, 0], dtype=dfitpack_int) ider = np.array([-1, 0, -1, 0], dtype=dfitpack_int) if pole_values is None: pole_values = (None, None) elif isinstance(pole_values, (float, np.float32, np.float64)): pole_values = (pole_values, pole_values) if isinstance(pole_continuity, bool): pole_continuity = (pole_continuity, pole_continuity) if isinstance(pole_exact, bool): pole_exact = (pole_exact, pole_exact) if isinstance(pole_flat, bool): pole_flat = (pole_flat, pole_flat) r0, r1 = pole_values iopt[1:] = pole_continuity if r0 is None: ider[0] = -1 else: ider[0] = pole_exact[0] if r1 is None: ider[2] = -1 else: ider[2] = pole_exact[1] ider[1], ider[3] = pole_flat u, v = np.ravel(u), np.ravel(v) r = np.asarray(r) if not ((0.0 <= u).all() and (u <= np.pi).all()): raise ValueError('u should be between [0, pi]') if not -np.pi <= v[0] < np.pi: raise ValueError('v[0] should be between [-pi, pi)') if not v[-1] <= v[0] + 2*np.pi: raise ValueError('v[-1] should be v[0] + 2pi or less ') if not np.all(np.diff(u) > 0.0): raise ValueError('u must be strictly increasing') if not np.all(np.diff(v) > 0.0): raise ValueError('v must be strictly increasing') if not u.size == r.shape[0]: raise ValueError('u dimension of r must have same number of ' 'elements as u') if not v.size == r.shape[1]: raise ValueError('v dimension of r must have same number of ' 'elements as v') if pole_continuity[1] is False and pole_flat[1] is True: raise ValueError('if pole_continuity is False, so must be ' 'pole_flat') if pole_continuity[0] is False and pole_flat[0] is True: raise ValueError('if pole_continuity is False, so must be ' 'pole_flat') if not s >= 0.0: raise ValueError('s should be positive') r = np.ravel(r) nu, tu, nv, tv, c, fp, ier = dfitpack.regrid_smth_spher(iopt, ider, u.copy(), v.copy(), r.copy(), r0, r1, s) if ier not in [0, 
-1, -2]: msg = _spfit_messages.get(ier, 'ier=%s' % (ier)) raise ValueError(msg) self.fp = fp self.tck = tu[:nu], tv[:nv], c[:(nu - 4) * (nv-4)] self.degrees = (3, 3)
import numpy as np from numpy.testing import assert_allclose, assert_array_equal import pytest from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn import scipy.fft as fft from scipy import fftpack # scipy.fft wraps the fftpack versions but with normalized inverse transforms. # So, the forward transforms and definitions are already thoroughly tested in # fftpack/test_real_transforms.py @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_1d(forward, backward, type, n, axis, norm): # Test the identity f^-1(f(x)) == x x = np.random.rand(n, n) y = forward(x, type, axis=axis, norm=norm) z = backward(y, type, axis=axis, norm=norm) assert_allclose(z, x) pad = [(0, 0)] * 2 pad[axis] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, n, axis, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [True, False]) def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.rand(7, 8) x_orig = x.copy() y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x) y_orig = y.copy() z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x) if not overwrite_x: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) else: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) @pytest.mark.parametrize("forward, backward", 
[(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 4), 0), ((4, 4), 1), ((4, 4), None), ((4, 4), (0, 1)), ((10, 12), None), ((10, 12), (0, 1)), ((4, 5, 6), None), ((4, 5, 6), 1), ((4, 5, 6), (0, 2)), ]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_nd(forward, backward, type, shape, axes, norm): # Test the identity f^-1(f(x)) == x x = np.random.random(shape) if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) z = backward(y, type, axes=axes, norm=norm) assert_allclose(z, x) if axes is None: pad = [(0, 4)] * x.ndim elif isinstance(axes, int): pad = [(0, 0)] * x.ndim pad[axes] = (0, 4) else: pad = [(0, 0)] * x.ndim for a in axes: pad[a] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, shape, axes, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 5), 0), ((4, 5), 1), ((4, 5), None), ]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [False, True]) def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.random(shape).astype(dtype) x_orig = x.copy() if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) y_orig = y.copy() z = backward(y, type, axes=axes, norm=norm) if overwrite_x: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) else: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) @pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn']) @pytest.mark.parametrize("type", [1, 2, 3, 4]) 
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_fftpack_equivalience(func, type, norm): x = np.random.rand(8, 16) fft_res = getattr(fft, func)(x, type, norm=norm) fftpack_res = getattr(fftpack, func)(x, type, norm=norm) assert_allclose(fft_res, fftpack_res)
e-q/scipy
scipy/fft/tests/test_real_transforms.py
scipy/interpolate/fitpack2.py
import numpy as np from numpy.testing import assert_allclose from scipy import ndimage from scipy.ndimage import _ctest from scipy.ndimage import _cytest from scipy._lib._ccallback import LowLevelCallable FILTER1D_FUNCTIONS = [ lambda filter_size: _ctest.filter1d(filter_size), lambda filter_size: _cytest.filter1d(filter_size, with_signature=False), lambda filter_size: LowLevelCallable(_cytest.filter1d(filter_size, with_signature=True)), lambda filter_size: LowLevelCallable.from_cython(_cytest, "_filter1d", _cytest.filter1d_capsule(filter_size)), ] FILTER2D_FUNCTIONS = [ lambda weights: _ctest.filter2d(weights), lambda weights: _cytest.filter2d(weights, with_signature=False), lambda weights: LowLevelCallable(_cytest.filter2d(weights, with_signature=True)), lambda weights: LowLevelCallable.from_cython(_cytest, "_filter2d", _cytest.filter2d_capsule(weights)), ] TRANSFORM_FUNCTIONS = [ lambda shift: _ctest.transform(shift), lambda shift: _cytest.transform(shift, with_signature=False), lambda shift: LowLevelCallable(_cytest.transform(shift, with_signature=True)), lambda shift: LowLevelCallable.from_cython(_cytest, "_transform", _cytest.transform_capsule(shift)), ] def test_generic_filter(): def filter2d(footprint_elements, weights): return (weights*footprint_elements).sum() def check(j): func = FILTER2D_FUNCTIONS[j] im = np.ones((20, 20)) im[:10,:10] = 0 footprint = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]) footprint_size = np.count_nonzero(footprint) weights = np.ones(footprint_size)/footprint_size res = ndimage.generic_filter(im, func(weights), footprint=footprint) std = ndimage.generic_filter(im, filter2d, footprint=footprint, extra_arguments=(weights,)) assert_allclose(res, std, err_msg="#{} failed".format(j)) for j, func in enumerate(FILTER2D_FUNCTIONS): check(j) def test_generic_filter1d(): def filter1d(input_line, output_line, filter_size): for i in range(output_line.size): output_line[i] = 0 for j in range(filter_size): output_line[i] += input_line[i+j] 
output_line /= filter_size def check(j): func = FILTER1D_FUNCTIONS[j] im = np.tile(np.hstack((np.zeros(10), np.ones(10))), (10, 1)) filter_size = 3 res = ndimage.generic_filter1d(im, func(filter_size), filter_size) std = ndimage.generic_filter1d(im, filter1d, filter_size, extra_arguments=(filter_size,)) assert_allclose(res, std, err_msg="#{} failed".format(j)) for j, func in enumerate(FILTER1D_FUNCTIONS): check(j) def test_geometric_transform(): def transform(output_coordinates, shift): return output_coordinates[0] - shift, output_coordinates[1] - shift def check(j): func = TRANSFORM_FUNCTIONS[j] im = np.arange(12).reshape(4, 3).astype(np.float64) shift = 0.5 res = ndimage.geometric_transform(im, func(shift)) std = ndimage.geometric_transform(im, transform, extra_arguments=(shift,)) assert_allclose(res, std, err_msg="#{} failed".format(j)) for j, func in enumerate(TRANSFORM_FUNCTIONS): check(j)
import numpy as np from numpy.testing import assert_allclose, assert_array_equal import pytest from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn import scipy.fft as fft from scipy import fftpack # scipy.fft wraps the fftpack versions but with normalized inverse transforms. # So, the forward transforms and definitions are already thoroughly tested in # fftpack/test_real_transforms.py @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_1d(forward, backward, type, n, axis, norm): # Test the identity f^-1(f(x)) == x x = np.random.rand(n, n) y = forward(x, type, axis=axis, norm=norm) z = backward(y, type, axis=axis, norm=norm) assert_allclose(z, x) pad = [(0, 0)] * 2 pad[axis] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, n, axis, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [True, False]) def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.rand(7, 8) x_orig = x.copy() y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x) y_orig = y.copy() z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x) if not overwrite_x: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) else: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) @pytest.mark.parametrize("forward, backward", 
[(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 4), 0), ((4, 4), 1), ((4, 4), None), ((4, 4), (0, 1)), ((10, 12), None), ((10, 12), (0, 1)), ((4, 5, 6), None), ((4, 5, 6), 1), ((4, 5, 6), (0, 2)), ]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_identity_nd(forward, backward, type, shape, axes, norm): # Test the identity f^-1(f(x)) == x x = np.random.random(shape) if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) z = backward(y, type, axes=axes, norm=norm) assert_allclose(z, x) if axes is None: pad = [(0, 4)] * x.ndim elif isinstance(axes, int): pad = [(0, 0)] * x.ndim pad[axes] = (0, 4) else: pad = [(0, 0)] * x.ndim for a in axes: pad[a] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, shape, axes, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 5), 0), ((4, 5), 1), ((4, 5), None), ]) @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64, np.complex64, np.complex128]) @pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) @pytest.mark.parametrize("overwrite_x", [False, True]) def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype, norm, overwrite_x): # Test the identity f^-1(f(x)) == x x = np.random.random(shape).astype(dtype) x_orig = x.copy() if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) y_orig = y.copy() z = backward(y, type, axes=axes, norm=norm) if overwrite_x: assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6) else: assert_allclose(z, x, rtol=1e-6, atol=1e-6) assert_array_equal(x, x_orig) assert_array_equal(y, y_orig) @pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn']) @pytest.mark.parametrize("type", [1, 2, 3, 4]) 
@pytest.mark.parametrize("norm", [None, 'backward', 'ortho', 'forward']) def test_fftpack_equivalience(func, type, norm): x = np.random.rand(8, 16) fft_res = getattr(fft, func)(x, type, norm=norm) fftpack_res = getattr(fftpack, func)(x, type, norm=norm) assert_allclose(fft_res, fftpack_res)
e-q/scipy
scipy/fft/tests/test_real_transforms.py
scipy/ndimage/tests/test_c_api.py
from udata.frontend import csv from .models import Reuse @csv.adapter(Reuse) class ReuseCsvAdapter(csv.Adapter): fields = ( 'id', 'title', 'slug', ('url', 'external_url'), 'type', 'description', ('remote_url', 'url'), ('organization', 'organization.name'), ('organization_id', 'organization.id'), ('image', lambda r: r.image(external=True)), ('featured', lambda r: r.featured or False), 'created_at', 'last_modified', ('tags', lambda r: ','.join(r.tags)), ('datasets', lambda r: ','.join([str(d.id) for d in r.datasets])), ) def dynamic_fields(self): return csv.metric_fields(Reuse)
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/core/reuse/csv.py
import copy import logging from bson.objectid import ObjectId from flask import request from werkzeug.urls import Href from elasticsearch_dsl.result import Response from udata.utils import Paginable log = logging.getLogger(__name__) class SearchResult(Paginable, Response): '''An ElasticSearch result wrapper for easy property access''' def __init__(self, query, result, *args, **kwargs): super(SearchResult, self).__init__(result, *args, **kwargs) self.query = query self._objects = None self._facets = None @property def query_string(self): return self.query._query @property def facets(self): if self._facets is None: self._facets = {} for name, facet in self.query.facets.items(): self._facets[name] = facet.get_values( self.get_aggregation(name), self.query.filter_values.get(name, ()) ) return self._facets @property def total(self): try: return self.hits.total except (KeyError, AttributeError): return 0 @property def max_score(self): try: return self.hits.max_score except (KeyError, AttributeError): return 0 @property def page(self): return (self.query.page or 1) if self.pages else 1 @property def page_size(self): return self.query.page_size @property def class_name(self): return self.query.adapter.model.__name__ def get_ids(self): try: return [hit['_id'] for hit in self.hits.hits] except KeyError: return [] def get_objects(self): if not self._objects: ids = [ObjectId(id) for id in self.get_ids()] objects = self.query.model.objects.in_bulk(ids) self._objects = [objects.get(id) for id in ids] # Filter out DBref ie. 
indexed object not found in DB self._objects = [o for o in self._objects if isinstance(o, self.query.model)] return self._objects @property def objects(self): return self.get_objects() def __iter__(self): for obj in self.get_objects(): yield obj def __len__(self): return len(self.hits.hits) def __getitem__(self, index): return self.get_objects()[index] def get_aggregation(self, name): ''' Fetch an aggregation result given its name As there is no way at this point know the aggregation type (ie. bucket, pipeline or metric) we guess it from the response attributes. Only bucket and metric types are handled ''' agg = self.aggregations[name] if 'buckets' in agg: return agg['buckets'] else: return agg def label_func(self, name): if name not in self.query.facets: return None return self.query.facets[name].labelize def labelize(self, name, value): func = self.label_func(name) return func(value) if func else value
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/search/result.py
from udata.auth import Permission, UserNeed from udata.core.organization.permissions import ( OrganizationAdminNeed, OrganizationEditorNeed ) class CloseDiscussionPermission(Permission): def __init__(self, discussion): needs = [] subject = discussion.subject if getattr(subject, 'organization'): needs.append(OrganizationAdminNeed(subject.organization.id)) needs.append(OrganizationEditorNeed(subject.organization.id)) elif subject.owner: needs.append(UserNeed(subject.owner.id)) super(CloseDiscussionPermission, self).__init__(*needs)
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/core/discussions/permissions.py
from udata.utils import safe_unicode from udata.forms import Form, fields, validators from udata.i18n import lazy_gettext as _ from .actions import list_backends from .models import VALIDATION_STATES, VALIDATION_REFUSED __all__ = 'HarvestSourceForm', 'HarvestSourceValidationForm' class HarvestConfigField(fields.DictField): ''' A DictField with extras validations on known configurations ''' def get_backend(self, form): return next(b for b in list_backends() if b.name == form.backend.data) def get_filter_specs(self, backend, key): candidates = (f for f in backend.filters if f.key == key) return next(candidates, None) def get_feature_specs(self, backend, key): candidates = (f for f in backend.features if f.key == key) return next(candidates, None) def pre_validate(self, form): if self.data: backend = self.get_backend(form) # Validate filters for f in (self.data.get('filters') or []): if not ('key' in f and 'value' in f): msg = 'A field should have both key and value properties' raise validators.ValidationError(msg) specs = self.get_filter_specs(backend, f['key']) if not specs: msg = 'Unknown filter key "{0}" for "{1}" backend' msg = msg.format(f['key'], backend.name) raise validators.ValidationError(msg) if isinstance(f['value'], str): f['value'] = safe_unicode(f['value']) # Fix encoding error if not isinstance(f['value'], specs.type): msg = '"{0}" filter should of type "{1}"' msg = msg.format(specs.key, specs.type.__name__) raise validators.ValidationError(msg) # Validate features for key, value in (self.data.get('features') or {}).items(): if not isinstance(value, bool): msg = 'A feature should be a boolean' raise validators.ValidationError(msg) if not self.get_feature_specs(backend, key): msg = 'Unknown feature "{0}" for "{1}" backend' msg = msg.format(key, backend.name) raise validators.ValidationError(msg) class HarvestSourceForm(Form): name = fields.StringField(_('Name'), [validators.DataRequired()]) description = fields.MarkdownField( _('Description'), 
description=_('Some optional details about this harvester')) url = fields.URLField(_('URL'), [validators.DataRequired()]) backend = fields.SelectField(_('Backend'), choices=lambda: [ (b.name, b.display_name) for b in list_backends() ]) owner = fields.CurrentUserField() organization = fields.PublishAsField(_('Publish as')) active = fields.BooleanField() autoarchive = fields.BooleanField() config = HarvestConfigField() class HarvestSourceValidationForm(Form): state = fields.SelectField(choices=list(VALIDATION_STATES.items())) comment = fields.StringField(_('Comment'), [validators.RequiredIfVal('state', VALIDATION_REFUSED )])
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/harvest/forms.py
import hashlib import mimetypes import os import zlib from slugify import Slugify CHUNK_SIZE = 2 ** 16 slugify = Slugify(separator='-', to_lower=True, safe_chars='.') def hash(file, hasher): blk_size_to_read = hasher.block_size * CHUNK_SIZE while (True): read_data = file.read(blk_size_to_read) if not read_data: break hasher.update(read_data) return hasher.hexdigest() def sha1(file): '''Perform a SHA1 digest on file''' return hash(file, hashlib.sha1()) def md5(file): '''Perform a MD5 digest on a file''' return hash(file, hashlib.md5()) def crc32(file): '''Perform a CRC digest on a file''' value = zlib.crc32(file.read()) return '%08X' % (value & 0xFFFFFFFF) def mime(url): '''Get the mimetype from an url or a filename''' return mimetypes.guess_type(url)[0] def extension(filename): '''Properly extract the extension from filename''' filename = os.path.basename(filename) extension = None while '.' in filename: filename, ext = os.path.splitext(filename) if ext.startswith('.'): ext = ext[1:] extension = ext if not extension else ext + '.' + extension return extension def normalize(filename): return slugify(filename)
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/core/storages/utils.py
import datetime from bson import ObjectId from werkzeug.datastructures import MultiDict from udata.auth import login_user from udata.auth.forms import ExtendedLoginForm, ExtendedResetPasswordForm from udata.core.user.factories import UserFactory, AdminFactory from udata.forms import ModelForm, fields from udata.models import db, User from udata.tests import TestCase class CurrentUserFieldTest(TestCase): def factory(self, *args, **kwargs): class Ownable(db.Document): owner = db.ReferenceField(User) class OwnableForm(ModelForm): model_class = Ownable owner = fields.CurrentUserField(*args, **kwargs) return Ownable, OwnableForm def test_empty_values(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm() self.assertEqual(form.owner.data, user) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_initial_value(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) ownable = Ownable(owner=user) form = OwnableForm(None, obj=ownable) self.assertEqual(form.owner.data, user) def test_with_valid_user_self(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm(MultiDict({ 'owner': str(user.id) })) self.assertEqual(form.owner.data, user) form.validate() self.assertEqual(form.errors, {}) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_with_other_user(self): Ownable, OwnableForm = self.factory() user = UserFactory() other = UserFactory() login_user(user) form = OwnableForm(MultiDict({ 'owner': str(other.id) })) self.assertEqual(form.owner.data, other) form.validate() self.assertIn('owner', form.errors) self.assertEqual(len(form.errors['owner']), 1) def test_with_other_user_admin(self): Ownable, OwnableForm = self.factory() user = UserFactory() admin = AdminFactory() login_user(admin) form = OwnableForm(MultiDict({ 'owner': str(user.id) })) self.assertEqual(form.owner.data, 
user) form.validate() self.assertEqual(form.errors, {}) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_with_valid_user_self_json(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm.from_json({ 'owner': str(user.id) }) self.assertEqual(form.owner.data, user) form.validate() self.assertEqual(form.errors, {}) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_with_user_null_json(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm.from_json({ 'owner': None }) self.assertEqual(form.owner.data, user) form.validate() self.assertEqual(form.errors, {}) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_with_user_object_self_from_json(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm.from_json({ 'owner': {'id': str(user.id)} }) self.assertEqual(form.owner.data, user) form.validate() self.assertEqual(form.errors, {}) ownable = Ownable() form.populate_obj(ownable) self.assertEqual(ownable.owner, user) def test_with_invalid_data(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm(MultiDict({ 'owner': str('wrongwith12c') })) form.validate() self.assertIn('owner', form.errors) self.assertEqual(len(form.errors['owner']), 1) def test_with_user_not_found(self): Ownable, OwnableForm = self.factory() user = UserFactory() login_user(user) form = OwnableForm(MultiDict({ 'owner': str(ObjectId()) })) form.validate() self.assertIn('owner', form.errors) self.assertEqual(len(form.errors['owner']), 1) def test_with_user_not_logged_found(self): Ownable, OwnableForm = self.factory() user = UserFactory() form = OwnableForm(MultiDict({ 'owner': str(user.id) })) form.validate() self.assertIn('owner', form.errors) self.assertEqual(len(form.errors['owner']), 1) def 
test_password_rotation(self): today = datetime.datetime.now() user = UserFactory(password='password', password_rotation_demanded=today, confirmed_at=today) form = ExtendedLoginForm.from_json({ 'email': user.email, 'password': 'password' }) form.validate() self.assertIn('Password must be changed for security reasons', form.errors['password'])
import copy import pytest from flask import url_for from udata import theme from udata.core.dataset.factories import DatasetFactory from udata.core.reuse.factories import ReuseFactory from udata.core.spatial.factories import GeoZoneFactory from udata.core.user.factories import UserFactory from udata.core.organization.factories import OrganizationFactory from udata.features.territories.models import ( TerritoryDataset, TERRITORY_DATASETS ) from udata.frontend.markdown import mdstrip from udata.settings import Testing from udata.utils import faker from udata.tests.helpers import assert200, assert400, assert404, assert_status, assert_cors class OEmbedAPITest: modules = ['core.dataset', 'core.organization', 'core.reuse'] def test_oembed_for_dataset(self, api): '''It should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembed', url=dataset.external_url) response = api.get(url) assert200(response) assert_cors(response) card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_dataset_redirect_link(self, api): '''It should fetch an oembed dataset using the redirect link.''' dataset = DatasetFactory() redirect_url = url_for('datasets.show_redirect', dataset=dataset, _external=True) url = url_for('api.oembed', 
url=redirect_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('dataset/card.html', dataset=dataset) assert card in response.json['html'] def test_oembed_for_unknown_dataset(self, api): '''It should raise a 404 on missing dataset.''' dataset_url = url_for('datasets.show', dataset='unknown', _external=True) url = url_for('api.oembed', url=dataset_url) response = api.get(url) assert404(response) assert_cors(response) def test_oembed_for_reuse(self, api): '''It should fetch a reuse in the oembed format.''' reuse = ReuseFactory() url = url_for('api.oembed', url=reuse.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('reuse/card.html', reuse=reuse) assert card in response.json['html'] def test_oembed_for_org(self, api): '''It should fetch an organization in the oembed format.''' org = OrganizationFactory() url = url_for('api.oembed', url=org.external_url) response = api.get(url) assert200(response) assert_cors(response) assert 'html' in response.json assert 'width' in response.json assert 'maxwidth' in response.json assert 'height' in response.json assert 'maxheight' in response.json assert response.json['type'] == 'rich' assert response.json['version'] == '1.0' card = theme.render('organization/card.html', organization=org) assert card in response.json['html'] def test_oembed_without_url(self, api): '''It should fail at fetching an oembed without a dataset.''' response = 
api.get(url_for('api.oembed')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_invalid_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' response = api.get(url_for('api.oembed', url='123456789')) assert400(response) assert 'url' in response.json['errors'] def test_oembed_with_an_unknown_url(self, api): '''It should fail at fetching an oembed with an invalid URL.''' url = url_for('api.oembed', url='http://local.test/somewhere') response = api.get(url) assert404(response) assert_cors(response) def test_oembed_with_port_in_https_url(self, api): '''It should works on HTTPS URLs with explicit port.''' dataset = DatasetFactory() url = dataset.external_url.replace('http://local.test/', 'https://local.test:443/') api_url = url_for('api.oembed', url=url) assert200(api.get(api_url, base_url='https://local.test:443/')) def test_oembed_does_not_support_xml(self, api): '''It does not support xml format.''' dataset = DatasetFactory() url = url_for('api.oembed', url=dataset.external_url, format='xml') response = api.get(url) assert_status(response, 501) assert_cors(response) assert response.json['message'] == 'Only JSON format is supported' def territory_dataset_factory(): org = OrganizationFactory() class TestDataset(TerritoryDataset): order = 1 id = faker.word() title = faker.sentence() organization_id = str(org.id) description = faker.paragraph() temporal_coverage = {'start': 2007, 'end': 2012} url_template = 'http://somehere.com/{code}' return TestDataset class OEmbedSettings(Testing): ACTIVATE_TERRITORIES = True class OEmbedsDatasetAPITest: modules = ['core.organization', 'features.territories', 'core.dataset'] settings = OEmbedSettings @pytest.fixture(autouse=True) def copy_territoy_datasets(self): self.territory_datasets_backup = { k: copy.deepcopy(v) for k, v in TERRITORY_DATASETS.items() } yield TERRITORY_DATASETS.update(self.territory_datasets_backup) def test_oembeds_dataset_api_get(self, api): '''It 
should fetch a dataset in the oembed format.''' dataset = DatasetFactory() url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert dataset.title in data['html'] assert dataset.external_url in data['html'] assert 'placeholders/default.png' in data['html'] assert mdstrip(dataset.description, 110) in data['html'] def test_oembeds_dataset_api_get_with_organization(self, api): '''It should fetch a dataset in the oembed format with org.''' organization = OrganizationFactory() dataset = DatasetFactory(organization=organization) url = url_for('api.oembeds', references='dataset-{id}'.format(id=dataset.id)) response = api.get(url) assert200(response) data = response.json[0] assert organization.name in data['html'] assert organization.external_url in data['html'] def test_oembeds_dataset_api_get_without_references(self, api): '''It should fail at fetching an oembed without a dataset.''' response = api.get(url_for('api.oembeds')) assert400(response) assert 'references' in response.json['errors'] def test_oembeds_dataset_api_get_without_good_id(self, api): '''It should fail at fetching an oembed without a good id.''' response = api.get(url_for('api.oembeds', references='123456789')) assert400(response) assert response.json['message'] == 'Invalid ID.' def test_oembeds_dataset_api_get_without_good_item(self, api): '''It should fail at fetching an oembed with a wrong item.''' user = UserFactory() url = url_for('api.oembeds', references='user-{id}'.format(id=user.id)) response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid object type.' 
def test_oembeds_dataset_api_get_without_valid_id(self, api): '''It should fail at fetching an oembed without a valid id.''' url = url_for('api.oembeds', references='dataset-123456789') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown dataset ID.' def test_oembeds_api_for_territory(self, api): '''It should fetch a territory in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_resolve_geoid(self, api): '''It should fetch a territory from a geoid in the oembed format.''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level][TestDataset.id] = TestDataset geoid = '{0.level}:{0.code}@latest'.format(zone) reference = 'territory-{0}:{1}'.format(geoid, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert200(response) data = response.json[0] assert 'html' in data assert 'width' in data assert 'maxwidth' in data assert 'height' in data assert 'maxheight' in data assert data['type'] == 'rich' assert data['version'] == '1.0' assert zone.name in data['html'] assert 'placeholders/default.png' in data['html'] def test_oembeds_api_for_territory_bad_id(self, api): '''Should raise 400 on bad territory ID''' 
url = url_for('api.oembeds', references='territory-xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Invalid territory ID.' def test_oembeds_api_for_territory_zone_not_found(self, api): '''Should raise 400 on unknown zone ID''' url = url_for('api.oembeds', references='territory-fr:commune:13004@1970-01-01:xyz') response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory identifier.' def test_oembeds_api_for_territory_level_not_registered(self, api): '''Should raise 400 on unregistered territory level''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() del TERRITORY_DATASETS[level] reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown kind of territory.' def test_oembeds_api_for_territory_dataset_not_registered(self, api): '''Should raise 400 on unregistered territory dataset''' country = faker.country_code().lower() level = 'commune' zone = GeoZoneFactory(level='{0}:{1}'.format(country, level)) TestDataset = territory_dataset_factory() TERRITORY_DATASETS[level] = {} reference = 'territory-{0}:{1}'.format(zone.id, TestDataset.id) url = url_for('api.oembeds', references=reference) response = api.get(url) assert400(response) assert response.json['message'] == 'Unknown territory dataset id.'
etalab/udata
udata/tests/api/test_oembed_api.py
udata/tests/forms/test_current_user_field.py
"""Support for Goal Zero Yeti Sensors.""" from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.const import CONF_NAME from . import YetiEntity from .const import BINARY_SENSOR_DICT, DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN PARALLEL_UPDATES = 0 async def async_setup_entry(hass, entry, async_add_entities): """Set up the Goal Zero Yeti sensor.""" name = entry.data[CONF_NAME] goalzero_data = hass.data[DOMAIN][entry.entry_id] sensors = [ YetiBinarySensor( goalzero_data[DATA_KEY_API], goalzero_data[DATA_KEY_COORDINATOR], name, sensor_name, entry.entry_id, ) for sensor_name in BINARY_SENSOR_DICT ] async_add_entities(sensors, True) class YetiBinarySensor(YetiEntity, BinarySensorEntity): """Representation of a Goal Zero Yeti sensor.""" def __init__(self, api, coordinator, name, sensor_name, server_unique_id): """Initialize a Goal Zero Yeti sensor.""" super().__init__(api, coordinator, name, server_unique_id) self._condition = sensor_name variable_info = BINARY_SENSOR_DICT[sensor_name] self._condition_name = variable_info[0] self._icon = variable_info[2] self._device_class = variable_info[1] @property def name(self): """Return the name of the sensor.""" return f"{self._name} {self._condition_name}" @property def unique_id(self): """Return the unique id of the sensor.""" return f"{self._server_unique_id}/{self._condition_name}" @property def is_on(self): """Return if the service is on.""" if self.api.data: return self.api.data[self._condition] == 1 return False @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/goalzero/binary_sensor.py
"""Describe group states.""" from homeassistant.components.group import GroupIntegrationRegistry from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import callback from homeassistant.helpers.typing import HomeAssistantType @callback def async_describe_on_off_states( hass: HomeAssistantType, registry: GroupIntegrationRegistry ) -> None: """Describe group on off states.""" registry.on_off_states({STATE_LOCKED}, STATE_UNLOCKED)
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/lock/group.py
"""Home Assistant representation of an UPnP/IGD.""" import asyncio from ipaddress import IPv4Address from typing import List, Mapping from async_upnp_client import UpnpFactory from async_upnp_client.aiohttp import AiohttpSessionRequester from async_upnp_client.profiles.igd import IgdDevice from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from .const import ( BYTES_RECEIVED, BYTES_SENT, CONF_LOCAL_IP, DISCOVERY_LOCATION, DISCOVERY_ST, DISCOVERY_UDN, DISCOVERY_USN, DOMAIN, DOMAIN_CONFIG, LOGGER as _LOGGER, PACKETS_RECEIVED, PACKETS_SENT, TIMESTAMP, ) class Device: """Home Assistant representation of an UPnP/IGD.""" def __init__(self, igd_device): """Initialize UPnP/IGD device.""" self._igd_device: IgdDevice = igd_device self._mapped_ports = [] @classmethod async def async_discover(cls, hass: HomeAssistantType) -> List[Mapping]: """Discover UPnP/IGD devices.""" _LOGGER.debug("Discovering UPnP/IGD devices") local_ip = None if DOMAIN in hass.data and DOMAIN_CONFIG in hass.data[DOMAIN]: local_ip = hass.data[DOMAIN][DOMAIN_CONFIG].get(CONF_LOCAL_IP) if local_ip: local_ip = IPv4Address(local_ip) discovery_infos = await IgdDevice.async_search(source_ip=local_ip, timeout=10) # add extra info and store devices devices = [] for discovery_info in discovery_infos: discovery_info[DISCOVERY_UDN] = discovery_info["_udn"] discovery_info[DISCOVERY_ST] = discovery_info["st"] discovery_info[DISCOVERY_LOCATION] = discovery_info["location"] usn = f"{discovery_info[DISCOVERY_UDN]}::{discovery_info[DISCOVERY_ST]}" discovery_info[DISCOVERY_USN] = usn _LOGGER.debug("Discovered device: %s", discovery_info) devices.append(discovery_info) return devices @classmethod async def async_create_device(cls, hass: HomeAssistantType, ssdp_location: str): """Create UPnP/IGD device.""" # build async_upnp_client requester session = async_get_clientsession(hass) requester = 
AiohttpSessionRequester(session, True, 10) # create async_upnp_client device factory = UpnpFactory(requester, disable_state_variable_validation=True) upnp_device = await factory.async_create_device(ssdp_location) igd_device = IgdDevice(upnp_device, None) return cls(igd_device) @property def udn(self) -> str: """Get the UDN.""" return self._igd_device.udn @property def name(self) -> str: """Get the name.""" return self._igd_device.name @property def manufacturer(self) -> str: """Get the manufacturer.""" return self._igd_device.manufacturer @property def model_name(self) -> str: """Get the model name.""" return self._igd_device.model_name @property def device_type(self) -> str: """Get the device type.""" return self._igd_device.device_type @property def unique_id(self) -> str: """Get the unique id.""" return f"{self.udn}::{self.device_type}" def __str__(self) -> str: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}" async def async_get_traffic_data(self) -> Mapping[str, any]: """ Get all traffic data in one go. Traffic data consists of: - total bytes sent - total bytes received - total packets sent - total packats received Data is timestamped. """ _LOGGER.debug("Getting traffic statistics from device: %s", self) values = await asyncio.gather( self._igd_device.async_get_total_bytes_received(), self._igd_device.async_get_total_bytes_sent(), self._igd_device.async_get_total_packets_received(), self._igd_device.async_get_total_packets_sent(), ) return { TIMESTAMP: dt_util.utcnow(), BYTES_RECEIVED: values[0], BYTES_SENT: values[1], PACKETS_RECEIVED: values[2], PACKETS_SENT: values[3], }
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/upnp/device.py
"""Config flow for UniFi.""" import socket import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from .const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, CONF_CONTROLLER, CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_POE_CLIENTS, CONF_SITE_ID, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, CONF_TRACK_WIRED_CLIENTS, CONTROLLER_ID, DEFAULT_POE_CLIENTS, DOMAIN as UNIFI_DOMAIN, LOGGER, ) from .controller import get_controller from .errors import AlreadyConfigured, AuthenticationRequired, CannotConnect DEFAULT_PORT = 8443 DEFAULT_SITE_ID = "default" DEFAULT_VERIFY_SSL = False @callback def get_controller_id_from_config_entry(config_entry): """Return controller with a matching bridge id.""" return CONTROLLER_ID.format( host=config_entry.data[CONF_CONTROLLER][CONF_HOST], site=config_entry.data[CONF_CONTROLLER][CONF_SITE_ID], ) class UnifiFlowHandler(config_entries.ConfigFlow, domain=UNIFI_DOMAIN): """Handle a UniFi config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) def __init__(self): """Initialize the UniFi flow.""" self.config = None self.desc = None self.sites = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input is not None: try: self.config = { CONF_HOST: user_input[CONF_HOST], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input.get(CONF_PORT), CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL), CONF_SITE_ID: DEFAULT_SITE_ID, } controller = await get_controller(self.hass, **self.config) self.sites = 
await controller.sites() return await self.async_step_site() except AuthenticationRequired: errors["base"] = "faulty_credentials" except CannotConnect: errors["base"] = "service_unavailable" except Exception: # pylint: disable=broad-except LOGGER.error( "Unknown error connecting with UniFi Controller at %s", user_input[CONF_HOST], ) return self.async_abort(reason="unknown") host = "" if await async_discover_unifi(self.hass): host = "unifi" return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=host): str, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool, } ), errors=errors, ) async def async_step_site(self, user_input=None): """Select site to control.""" errors = {} if user_input is not None: try: desc = user_input.get(CONF_SITE_ID, self.desc) for site in self.sites.values(): if desc == site["desc"]: self.config[CONF_SITE_ID] = site["name"] break for entry in self._async_current_entries(): controller = entry.data[CONF_CONTROLLER] if ( controller[CONF_HOST] == self.config[CONF_HOST] and controller[CONF_SITE_ID] == self.config[CONF_SITE_ID] ): raise AlreadyConfigured data = {CONF_CONTROLLER: self.config} return self.async_create_entry(title=desc, data=data) except AlreadyConfigured: return self.async_abort(reason="already_configured") if len(self.sites) == 1: self.desc = next(iter(self.sites.values()))["desc"] return await self.async_step_site(user_input={}) sites = [] for site in self.sites.values(): sites.append(site["desc"]) return self.async_show_form( step_id="site", data_schema=vol.Schema({vol.Required(CONF_SITE_ID): vol.In(sites)}), errors=errors, ) class UnifiOptionsFlowHandler(config_entries.OptionsFlow): """Handle Unifi options.""" def __init__(self, config_entry): """Initialize UniFi options flow.""" self.config_entry = config_entry self.options = dict(config_entry.options) 
self.controller = None async def async_step_init(self, user_input=None): """Manage the UniFi options.""" self.controller = self.hass.data[UNIFI_DOMAIN][self.config_entry.entry_id] self.options[CONF_BLOCK_CLIENT] = self.controller.option_block_clients if self.show_advanced_options: return await self.async_step_device_tracker() return await self.async_step_simple_options() async def async_step_simple_options(self, user_input=None): """For simple Jack.""" if user_input is not None: self.options.update(user_input) return await self._update_options() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="simple_options", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), } ), ) async def async_step_device_tracker(self, user_input=None): """Manage the device tracker options.""" if user_input is not None: self.options.update(user_input) return await self.async_step_client_control() ssids = ( set(self.controller.api.wlans) | { f"{wlan.name}{wlan.name_combine_suffix}" for wlan in self.controller.api.wlans.values() if not wlan.name_combine_enabled } | { wlan["name"] for ap in self.controller.api.devices.values() for wlan in ap.wlan_overrides if "name" in wlan } ) ssid_filter = {ssid: ssid for ssid in sorted(list(ssids))} return self.async_show_form( step_id="device_tracker", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_WIRED_CLIENTS, default=self.controller.option_track_wired_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, 
vol.Optional( CONF_SSID_FILTER, default=self.controller.option_ssid_filter ): cv.multi_select(ssid_filter), vol.Optional( CONF_DETECTION_TIME, default=int( self.controller.option_detection_time.total_seconds() ), ): int, vol.Optional( CONF_IGNORE_WIRED_BUG, default=self.controller.option_ignore_wired_bug, ): bool, } ), ) async def async_step_client_control(self, user_input=None): """Manage configuration of network access controlled clients.""" errors = {} if user_input is not None: self.options.update(user_input) return await self.async_step_statistics_sensors() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="client_control", data_schema=vol.Schema( { vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), vol.Optional( CONF_POE_CLIENTS, default=self.options.get(CONF_POE_CLIENTS, DEFAULT_POE_CLIENTS), ): bool, } ), errors=errors, ) async def async_step_statistics_sensors(self, user_input=None): """Manage the statistics sensors options.""" if user_input is not None: self.options.update(user_input) return await self._update_options() return self.async_show_form( step_id="statistics_sensors", data_schema=vol.Schema( { vol.Optional( CONF_ALLOW_BANDWIDTH_SENSORS, default=self.controller.option_allow_bandwidth_sensors, ): bool, vol.Optional( CONF_ALLOW_UPTIME_SENSORS, default=self.controller.option_allow_uptime_sensors, ): bool, } ), ) async def _update_options(self): """Update config entry options.""" return self.async_create_entry(title="", data=self.options) async def async_discover_unifi(hass): """Discover UniFi address.""" try: return await hass.async_add_executor_job(socket.gethostbyname, "unifi") except socket.gaierror: return None
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/unifi/config_flow.py
"""Config flow for Logitech Squeezebox integration.""" import asyncio import logging from pysqueezebox import Server, async_discover import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, HTTP_UNAUTHORIZED, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession # pylint: disable=unused-import from .const import DEFAULT_PORT, DOMAIN _LOGGER = logging.getLogger(__name__) TIMEOUT = 5 def _base_schema(discovery_info=None): """Generate base schema.""" base_schema = {} if discovery_info and CONF_HOST in discovery_info: base_schema.update( { vol.Required( CONF_HOST, description={"suggested_value": discovery_info[CONF_HOST]}, ): str, } ) else: base_schema.update({vol.Required(CONF_HOST): str}) if discovery_info and CONF_PORT in discovery_info: base_schema.update( { vol.Required( CONF_PORT, default=DEFAULT_PORT, description={"suggested_value": discovery_info[CONF_PORT]}, ): int, } ) else: base_schema.update({vol.Required(CONF_PORT, default=DEFAULT_PORT): int}) base_schema.update( {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} ) return vol.Schema(base_schema) class SqueezeboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Logitech Squeezebox.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize an instance of the squeezebox config flow.""" self.data_schema = _base_schema() self.discovery_info = None async def _discover(self, uuid=None): """Discover an unconfigured LMS server.""" self.discovery_info = None discovery_event = asyncio.Event() def _discovery_callback(server): if server.uuid: # ignore already configured uuids for entry in self._async_current_entries(): if entry.unique_id == server.uuid: return self.discovery_info = { CONF_HOST: server.host, CONF_PORT: server.port, "uuid": server.uuid, } _LOGGER.debug("Discovered server: %s", 
self.discovery_info) discovery_event.set() discovery_task = self.hass.async_create_task( async_discover(_discovery_callback) ) await discovery_event.wait() discovery_task.cancel() # stop searching as soon as we find server # update with suggested values from discovery self.data_schema = _base_schema(self.discovery_info) async def _validate_input(self, data): """ Validate the user input allows us to connect. Retrieve unique id and abort if already configured. """ server = Server( async_get_clientsession(self.hass), data[CONF_HOST], data[CONF_PORT], data.get(CONF_USERNAME), data.get(CONF_PASSWORD), ) try: status = await server.async_query("serverstatus") if not status: if server.http_status == HTTP_UNAUTHORIZED: return "invalid_auth" return "cannot_connect" except Exception: # pylint: disable=broad-except return "unknown" if "uuid" in status: await self.async_set_unique_id(status["uuid"]) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input and CONF_HOST in user_input: # update with host provided by user self.data_schema = _base_schema(user_input) return await self.async_step_edit() # no host specified, see if we can discover an unconfigured LMS server try: await asyncio.wait_for(self._discover(), timeout=TIMEOUT) return await self.async_step_edit() except asyncio.TimeoutError: errors["base"] = "no_server_found" # display the form return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Optional(CONF_HOST): str}), errors=errors, ) async def async_step_edit(self, user_input=None): """Edit a discovered or manually inputted server.""" errors = {} if user_input: error = await self._validate_input(user_input) if not error: return self.async_create_entry( title=user_input[CONF_HOST], data=user_input ) errors["base"] = error return self.async_show_form( step_id="edit", data_schema=self.data_schema, errors=errors ) async def async_step_import(self, 
config): """Import a config flow from configuration.""" error = await self._validate_input(config) if error: return self.async_abort(reason=error) return self.async_create_entry(title=config[CONF_HOST], data=config) async def async_step_discovery(self, discovery_info): """Handle discovery.""" _LOGGER.debug("Reached discovery flow with info: %s", discovery_info) if "uuid" in discovery_info: await self.async_set_unique_id(discovery_info.pop("uuid")) self._abort_if_unique_id_configured() else: # attempt to connect to server and determine uuid. will fail if password required error = await self._validate_input(discovery_info) if error: await self._async_handle_discovery_without_unique_id() # update schema with suggested values from discovery self.data_schema = _base_schema(discovery_info) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context.update({"title_placeholders": {"host": discovery_info[CONF_HOST]}}) return await self.async_step_edit()
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/squeezebox/config_flow.py
"""Support for PoolSense binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_PROBLEM, BinarySensorEntity, ) from homeassistant.const import CONF_EMAIL from . import PoolSenseEntity from .const import DOMAIN BINARY_SENSORS = { "pH Status": { "unit": None, "icon": None, "name": "pH Status", "device_class": DEVICE_CLASS_PROBLEM, }, "Chlorine Status": { "unit": None, "icon": None, "name": "Chlorine Status", "device_class": DEVICE_CLASS_PROBLEM, }, } async def async_setup_entry(hass, config_entry, async_add_entities): """Defer sensor setup to the shared sensor module.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] binary_sensors_list = [] for binary_sensor in BINARY_SENSORS: binary_sensors_list.append( PoolSenseBinarySensor( coordinator, config_entry.data[CONF_EMAIL], binary_sensor ) ) async_add_entities(binary_sensors_list, False) class PoolSenseBinarySensor(PoolSenseEntity, BinarySensorEntity): """Representation of PoolSense binary sensors.""" @property def is_on(self): """Return true if the binary sensor is on.""" return self.coordinator.data[self.info_type] == "red" @property def icon(self): """Return the icon.""" return BINARY_SENSORS[self.info_type]["icon"] @property def device_class(self): """Return the class of this device.""" return BINARY_SENSORS[self.info_type]["device_class"] @property def name(self): """Return the name of the binary sensor.""" return f"PoolSense {BINARY_SENSORS[self.info_type]['name']}"
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/poolsense/binary_sensor.py
"""Support for EnOcean switches.""" import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import CONF_ID, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity from .device import EnOceanEntity CONF_CHANNEL = "channel" DEFAULT_NAME = "EnOcean Switch" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_CHANNEL, default=0): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the EnOcean switch platform.""" channel = config.get(CONF_CHANNEL) dev_id = config.get(CONF_ID) dev_name = config.get(CONF_NAME) add_entities([EnOceanSwitch(dev_id, dev_name, channel)]) class EnOceanSwitch(EnOceanEntity, ToggleEntity): """Representation of an EnOcean switch device.""" def __init__(self, dev_id, dev_name, channel): """Initialize the EnOcean switch device.""" super().__init__(dev_id, dev_name) self._light = None self._on_state = False self._on_state2 = False self.channel = channel @property def is_on(self): """Return whether the switch is on or off.""" return self._on_state @property def name(self): """Return the device name.""" return self.dev_name def turn_on(self, **kwargs): """Turn on the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = True def turn_off(self, **kwargs): """Turn off the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = False def value_changed(self, packet): """Update the internal state of the 
switch.""" if packet.data[0] == 0xA5: # power meter telegram, turn on if > 10 watts packet.parse_eep(0x12, 0x01) if packet.parsed["DT"]["raw_value"] == 1: raw_val = packet.parsed["MR"]["raw_value"] divisor = packet.parsed["DIV"]["raw_value"] watts = raw_val / (10 ** divisor) if watts > 1: self._on_state = True self.schedule_update_ha_state() elif packet.data[0] == 0xD2: # actuator status telegram packet.parse_eep(0x01, 0x01) if packet.parsed["CMD"]["raw_value"] == 4: channel = packet.parsed["IO"]["raw_value"] output = packet.parsed["OV"]["raw_value"] if channel == self.channel: self._on_state = output > 0 self.schedule_update_ha_state()
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/enocean/switch.py
"""Support for Agent.""" import asyncio from agent import AgentError from agent.a import Agent from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECTION, DOMAIN as AGENT_DOMAIN, SERVER_URL ATTRIBUTION = "ispyconnect.com" DEFAULT_BRAND = "Agent DVR by ispyconnect.com" FORWARDS = ["alarm_control_panel", "camera"] async def async_setup(hass, config): """Old way to set up integrations.""" return True async def async_setup_entry(hass, config_entry): """Set up the Agent component.""" hass.data.setdefault(AGENT_DOMAIN, {}) server_origin = config_entry.data[SERVER_URL] agent_client = Agent(server_origin, async_get_clientsession(hass)) try: await agent_client.update() except AgentError as err: await agent_client.close() raise ConfigEntryNotReady from err if not agent_client.is_available: raise ConfigEntryNotReady await agent_client.get_devices() hass.data[AGENT_DOMAIN][config_entry.entry_id] = {CONNECTION: agent_client} device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(AGENT_DOMAIN, agent_client.unique)}, manufacturer="iSpyConnect", name=f"Agent {agent_client.name}", model="Agent DVR", sw_version=agent_client.version, ) for forward in FORWARDS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, forward) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, forward) for forward in FORWARDS ] ) ) await hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION].close() if unload_ok: hass.data[AGENT_DOMAIN].pop(config_entry.entry_id) return unload_ok
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/agent_dvr/__init__.py
"""The cert_expiry component.""" from datetime import datetime, timedelta import logging from typing import Optional from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DEFAULT_PORT, DOMAIN from .errors import TemporaryFailure, ValidationFailure from .helper import get_cert_expiry_timestamp _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=12) async def async_setup(hass, config): """Platform setup, do nothing.""" return True async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Load the saved entities.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] coordinator = CertExpiryDataUpdateCoordinator(hass, host, port) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=f"{host}:{port}") hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.config_entries.async_forward_entry_unload(entry, "sensor") class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator[datetime]): """Class to manage fetching Cert Expiry data from single endpoint.""" def __init__(self, hass, host, port): """Initialize global Cert Expiry data updater.""" self.host = host self.port = port self.cert_error = None self.is_cert_valid = False display_port = f":{port}" if port != DEFAULT_PORT else "" name = f"{self.host}{display_port}" super().__init__( hass, _LOGGER, name=name, update_interval=SCAN_INTERVAL, ) async def 
_async_update_data(self) -> Optional[datetime]: """Fetch certificate.""" try: timestamp = await get_cert_expiry_timestamp(self.hass, self.host, self.port) except TemporaryFailure as err: raise UpdateFailed(err.args[0]) from err except ValidationFailure as err: self.cert_error = err self.is_cert_valid = False _LOGGER.error("Certificate validation error: %s [%s]", self.host, err) return None self.cert_error = None self.is_cert_valid = True return timestamp
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/cert_expiry/__init__.py
"""Support for consuming values for the Volkszaehler API.""" from datetime import timedelta import logging from volkszaehler import Volkszaehler from volkszaehler.exceptions import VolkszaehlerApiConnectionError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_PORT, ENERGY_WATT_HOUR, POWER_WATT, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) CONF_UUID = "uuid" DEFAULT_HOST = "localhost" DEFAULT_NAME = "Volkszaehler" DEFAULT_PORT = 80 MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) SENSOR_TYPES = { "average": ["Average", POWER_WATT, "mdi:power-off"], "consumption": ["Consumption", ENERGY_WATT_HOUR, "mdi:power-plug"], "max": ["Max", POWER_WATT, "mdi:arrow-up"], "min": ["Min", POWER_WATT, "mdi:arrow-down"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_UUID): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_MONITORED_CONDITIONS, default=["average"]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Volkszaehler sensors.""" host = config[CONF_HOST] name = config[CONF_NAME] port = config[CONF_PORT] uuid = config[CONF_UUID] conditions = config[CONF_MONITORED_CONDITIONS] session = async_get_clientsession(hass) vz_api = VolkszaehlerData( Volkszaehler(hass.loop, session, uuid, host=host, port=port) ) await vz_api.async_update() if vz_api.api.data is None: raise PlatformNotReady dev = [] for condition in conditions: 
dev.append(VolkszaehlerSensor(vz_api, name, condition)) async_add_entities(dev, True) class VolkszaehlerSensor(Entity): """Implementation of a Volkszaehler sensor.""" def __init__(self, vz_api, name, sensor_type): """Initialize the Volkszaehler sensor.""" self.vz_api = vz_api self._name = name self.type = sensor_type self._state = None @property def name(self): """Return the name of the sensor.""" return "{} {}".format(self._name, SENSOR_TYPES[self.type][0]) @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return SENSOR_TYPES[self.type][1] @property def available(self): """Could the device be accessed during the last update call.""" return self.vz_api.available @property def state(self): """Return the state of the resources.""" return self._state async def async_update(self): """Get the latest data from REST API.""" await self.vz_api.async_update() if self.vz_api.api.data is not None: self._state = round(getattr(self.vz_api.api, self.type), 2) class VolkszaehlerData: """The class for handling the data retrieval from the Volkszaehler API.""" def __init__(self, api): """Initialize the data object.""" self.api = api self.available = True @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the Volkszaehler REST API.""" try: await self.api.get_data() self.available = True except VolkszaehlerApiConnectionError: _LOGGER.error("Unable to fetch data from the Volkszaehler API") self.available = False
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/volkszaehler/sensor.py
"""Device tracker constants.""" from datetime import timedelta import logging LOGGER = logging.getLogger(__package__) DOMAIN = "device_tracker" PLATFORM_TYPE_LEGACY = "legacy" PLATFORM_TYPE_ENTITY = "entity_platform" SOURCE_TYPE_GPS = "gps" SOURCE_TYPE_ROUTER = "router" SOURCE_TYPE_BLUETOOTH = "bluetooth" SOURCE_TYPE_BLUETOOTH_LE = "bluetooth_le" CONF_SCAN_INTERVAL = "interval_seconds" SCAN_INTERVAL = timedelta(seconds=12) CONF_TRACK_NEW = "track_new_devices" DEFAULT_TRACK_NEW = True CONF_CONSIDER_HOME = "consider_home" DEFAULT_CONSIDER_HOME = timedelta(seconds=180) CONF_NEW_DEVICE_DEFAULTS = "new_device_defaults" ATTR_ATTRIBUTES = "attributes" ATTR_BATTERY = "battery" ATTR_DEV_ID = "dev_id" ATTR_GPS = "gps" ATTR_HOST_NAME = "host_name" ATTR_LOCATION_NAME = "location_name" ATTR_MAC = "mac" ATTR_SOURCE_TYPE = "source_type" ATTR_CONSIDER_HOME = "consider_home"
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/device_tracker/const.py
"""The Netatmo data handler.""" from collections import deque from datetime import timedelta from functools import partial from itertools import islice import logging from time import time from typing import Deque, Dict, List import pyatmo from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_time_interval from .const import AUTH, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) CAMERA_DATA_CLASS_NAME = "CameraData" WEATHERSTATION_DATA_CLASS_NAME = "WeatherStationData" HOMECOACH_DATA_CLASS_NAME = "HomeCoachData" HOMEDATA_DATA_CLASS_NAME = "HomeData" HOMESTATUS_DATA_CLASS_NAME = "HomeStatus" PUBLICDATA_DATA_CLASS_NAME = "PublicData" NEXT_SCAN = "next_scan" DATA_CLASSES = { WEATHERSTATION_DATA_CLASS_NAME: pyatmo.WeatherStationData, HOMECOACH_DATA_CLASS_NAME: pyatmo.HomeCoachData, CAMERA_DATA_CLASS_NAME: pyatmo.CameraData, HOMEDATA_DATA_CLASS_NAME: pyatmo.HomeData, HOMESTATUS_DATA_CLASS_NAME: pyatmo.HomeStatus, PUBLICDATA_DATA_CLASS_NAME: pyatmo.PublicData, } BATCH_SIZE = 3 DEFAULT_INTERVALS = { HOMEDATA_DATA_CLASS_NAME: 900, HOMESTATUS_DATA_CLASS_NAME: 300, CAMERA_DATA_CLASS_NAME: 900, WEATHERSTATION_DATA_CLASS_NAME: 600, HOMECOACH_DATA_CLASS_NAME: 300, PUBLICDATA_DATA_CLASS_NAME: 600, } SCAN_INTERVAL = 60 class NetatmoDataHandler: """Manages the Netatmo data handling.""" def __init__(self, hass: HomeAssistant, entry: ConfigEntry): """Initialize self.""" self.hass = hass self._auth = hass.data[DOMAIN][entry.entry_id][AUTH] self.listeners: List[CALLBACK_TYPE] = [] self._data_classes: Dict = {} self.data = {} self._queue: Deque = deque() self._webhook: bool = False async def async_setup(self): """Set up the Netatmo data handler.""" async_track_time_interval( self.hass, self.async_update, timedelta(seconds=SCAN_INTERVAL) ) self.listeners.append( async_dispatcher_connect( self.hass, 
f"signal-{DOMAIN}-webhook-None", self.handle_event, ) ) async def async_update(self, event_time): """ Update device. We do up to BATCH_SIZE calls in one update in order to minimize the calls on the api service. """ for data_class in islice(self._queue, 0, BATCH_SIZE): if data_class[NEXT_SCAN] > time(): continue self._data_classes[data_class["name"]][NEXT_SCAN] = ( time() + data_class["interval"] ) await self.async_fetch_data( data_class["class"], data_class["name"], **data_class["kwargs"] ) self._queue.rotate(BATCH_SIZE) async def async_cleanup(self): """Clean up the Netatmo data handler.""" for listener in self.listeners: listener() async def handle_event(self, event): """Handle webhook events.""" if event["data"]["push_type"] == "webhook_activation": _LOGGER.info("%s webhook successfully registered", MANUFACTURER) self._webhook = True elif event["data"]["push_type"] == "NACamera-connection": _LOGGER.debug("%s camera reconnected", MANUFACTURER) self._data_classes[CAMERA_DATA_CLASS_NAME][NEXT_SCAN] = time() async def async_fetch_data(self, data_class, data_class_entry, **kwargs): """Fetch data and notify.""" try: self.data[data_class_entry] = await self.hass.async_add_executor_job( partial(data_class, **kwargs), self._auth, ) for update_callback in self._data_classes[data_class_entry][ "subscriptions" ]: if update_callback: update_callback() except (pyatmo.NoDevice, pyatmo.ApiError) as err: _LOGGER.debug(err) async def register_data_class( self, data_class_name, data_class_entry, update_callback, **kwargs ): """Register data class.""" if data_class_entry in self._data_classes: self._data_classes[data_class_entry]["subscriptions"].append( update_callback ) return self._data_classes[data_class_entry] = { "class": DATA_CLASSES[data_class_name], "name": data_class_entry, "interval": DEFAULT_INTERVALS[data_class_name], NEXT_SCAN: time() + DEFAULT_INTERVALS[data_class_name], "kwargs": kwargs, "subscriptions": [update_callback], } await self.async_fetch_data( 
DATA_CLASSES[data_class_name], data_class_entry, **kwargs ) self._queue.append(self._data_classes[data_class_entry]) _LOGGER.debug("Data class %s added", data_class_entry) async def unregister_data_class(self, data_class_entry, update_callback): """Unregister data class.""" if update_callback not in self._data_classes[data_class_entry]["subscriptions"]: return self._data_classes[data_class_entry]["subscriptions"].remove(update_callback) if not self._data_classes[data_class_entry].get("subscriptions"): self._queue.remove(self._data_classes[data_class_entry]) self._data_classes.pop(data_class_entry) _LOGGER.debug("Data class %s removed", data_class_entry) @property def webhook(self) -> bool: """Return the webhook state.""" return self._webhook
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/netatmo/data_handler.py
"""Support for Enviro pHAT sensors.""" from datetime import timedelta import importlib import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_DISPLAY_OPTIONS, CONF_NAME, PRESSURE_HPA, TEMP_CELSIUS, VOLT, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "envirophat" CONF_USE_LEDS = "use_leds" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) SENSOR_TYPES = { "light": ["light", " ", "mdi:weather-sunny"], "light_red": ["light_red", " ", "mdi:invert-colors"], "light_green": ["light_green", " ", "mdi:invert-colors"], "light_blue": ["light_blue", " ", "mdi:invert-colors"], "accelerometer_x": ["accelerometer_x", "G", "mdi:earth"], "accelerometer_y": ["accelerometer_y", "G", "mdi:earth"], "accelerometer_z": ["accelerometer_z", "G", "mdi:earth"], "magnetometer_x": ["magnetometer_x", " ", "mdi:magnet"], "magnetometer_y": ["magnetometer_y", " ", "mdi:magnet"], "magnetometer_z": ["magnetometer_z", " ", "mdi:magnet"], "temperature": ["temperature", TEMP_CELSIUS, "mdi:thermometer"], "pressure": ["pressure", PRESSURE_HPA, "mdi:gauge"], "voltage_0": ["voltage_0", VOLT, "mdi:flash"], "voltage_1": ["voltage_1", VOLT, "mdi:flash"], "voltage_2": ["voltage_2", VOLT, "mdi:flash"], "voltage_3": ["voltage_3", VOLT, "mdi:flash"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DISPLAY_OPTIONS, default=list(SENSOR_TYPES)): [ vol.In(SENSOR_TYPES) ], vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_USE_LEDS, default=False): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Sense HAT sensor platform.""" try: envirophat = importlib.import_module("envirophat") except OSError: _LOGGER.error("No Enviro pHAT was found") return False data = EnvirophatData(envirophat, 
config.get(CONF_USE_LEDS)) dev = [] for variable in config[CONF_DISPLAY_OPTIONS]: dev.append(EnvirophatSensor(data, variable)) add_entities(dev, True) class EnvirophatSensor(Entity): """Representation of an Enviro pHAT sensor.""" def __init__(self, data, sensor_types): """Initialize the sensor.""" self.data = data self._name = SENSOR_TYPES[sensor_types][0] self._unit_of_measurement = SENSOR_TYPES[sensor_types][1] self.type = sensor_types self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement def update(self): """Get the latest data and updates the states.""" self.data.update() if self.type == "light": self._state = self.data.light if self.type == "light_red": self._state = self.data.light_red if self.type == "light_green": self._state = self.data.light_green if self.type == "light_blue": self._state = self.data.light_blue if self.type == "accelerometer_x": self._state = self.data.accelerometer_x if self.type == "accelerometer_y": self._state = self.data.accelerometer_y if self.type == "accelerometer_z": self._state = self.data.accelerometer_z if self.type == "magnetometer_x": self._state = self.data.magnetometer_x if self.type == "magnetometer_y": self._state = self.data.magnetometer_y if self.type == "magnetometer_z": self._state = self.data.magnetometer_z if self.type == "temperature": self._state = self.data.temperature if self.type == "pressure": self._state = self.data.pressure if self.type == "voltage_0": self._state = self.data.voltage_0 if self.type == "voltage_1": self._state = self.data.voltage_1 if self.type == "voltage_2": self._state = self.data.voltage_2 if self.type == "voltage_3": self._state = 
self.data.voltage_3 class EnvirophatData: """Get the latest data and update.""" def __init__(self, envirophat, use_leds): """Initialize the data object.""" self.envirophat = envirophat self.use_leds = use_leds # sensors readings self.light = None self.light_red = None self.light_green = None self.light_blue = None self.accelerometer_x = None self.accelerometer_y = None self.accelerometer_z = None self.magnetometer_x = None self.magnetometer_y = None self.magnetometer_z = None self.temperature = None self.pressure = None self.voltage_0 = None self.voltage_1 = None self.voltage_2 = None self.voltage_3 = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Enviro pHAT.""" # Light sensor reading: 16-bit integer self.light = self.envirophat.light.light() if self.use_leds: self.envirophat.leds.on() # the three color values scaled against the overall light, 0-255 self.light_red, self.light_green, self.light_blue = self.envirophat.light.rgb() if self.use_leds: self.envirophat.leds.off() # accelerometer readings in G ( self.accelerometer_x, self.accelerometer_y, self.accelerometer_z, ) = self.envirophat.motion.accelerometer() # raw magnetometer reading ( self.magnetometer_x, self.magnetometer_y, self.magnetometer_z, ) = self.envirophat.motion.magnetometer() # temperature resolution of BMP280 sensor: 0.01°C self.temperature = round(self.envirophat.weather.temperature(), 2) # pressure resolution of BMP280 sensor: 0.16 Pa, rounding to 0.1 Pa # with conversion to 100 Pa = 1 hPa self.pressure = round(self.envirophat.weather.pressure() / 100.0, 3) # Voltage sensor, reading between 0-3.3V ( self.voltage_0, self.voltage_1, self.voltage_2, self.voltage_3, ) = self.envirophat.analog.read_all()
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/envirophat/sensor.py
"""Switch platform integration for Numato USB GPIO expanders.""" import logging from numato_gpio import NumatoGpioError from homeassistant.const import ( CONF_DEVICES, CONF_ID, CONF_SWITCHES, DEVICE_DEFAULT_NAME, ) from homeassistant.helpers.entity import ToggleEntity from . import CONF_INVERT_LOGIC, CONF_PORTS, DATA_API, DOMAIN _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the configured Numato USB GPIO switch ports.""" if discovery_info is None: return api = hass.data[DOMAIN][DATA_API] switches = [] devices = hass.data[DOMAIN][CONF_DEVICES] for device in [d for d in devices if CONF_SWITCHES in d]: device_id = device[CONF_ID] platform = device[CONF_SWITCHES] invert_logic = platform[CONF_INVERT_LOGIC] ports = platform[CONF_PORTS] for port, port_name in ports.items(): try: api.setup_output(device_id, port) api.write_output(device_id, port, 1 if invert_logic else 0) except NumatoGpioError as err: _LOGGER.error( "Failed to initialize switch '%s' on Numato device %s port %s: %s", port_name, device_id, port, err, ) continue switches.append( NumatoGpioSwitch( port_name, device_id, port, invert_logic, api, ) ) add_entities(switches, True) class NumatoGpioSwitch(ToggleEntity): """Representation of a Numato USB GPIO switch port.""" def __init__(self, name, device_id, port, invert_logic, api): """Initialize the port.""" self._name = name or DEVICE_DEFAULT_NAME self._device_id = device_id self._port = port self._invert_logic = invert_logic self._state = False self._api = api @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if port is turned on.""" return self._state def turn_on(self, **kwargs): """Turn the port on.""" try: self._api.write_output( self._device_id, self._port, 0 if self._invert_logic else 1 ) self._state = True self.schedule_update_ha_state() except 
NumatoGpioError as err: _LOGGER.error( "Failed to turn on Numato device %s port %s: %s", self._device_id, self._port, err, ) def turn_off(self, **kwargs): """Turn the port off.""" try: self._api.write_output( self._device_id, self._port, 1 if self._invert_logic else 0 ) self._state = False self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( "Failed to turn off Numato device %s port %s: %s", self._device_id, self._port, err, )
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/numato/switch.py
"""Tracking for bluetooth devices.""" import asyncio import logging from typing import List, Optional, Set, Tuple # pylint: disable=import-error import bluetooth from bt_proximity import BluetoothRSSI import voluptuous as vol from homeassistant.components.device_tracker import PLATFORM_SCHEMA from homeassistant.components.device_tracker.const import ( CONF_SCAN_INTERVAL, CONF_TRACK_NEW, DEFAULT_TRACK_NEW, SCAN_INTERVAL, SOURCE_TYPE_BLUETOOTH, ) from homeassistant.components.device_tracker.legacy import ( YAML_DEVICES, async_load_config, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import HomeAssistantType from .const import DOMAIN, SERVICE_UPDATE _LOGGER = logging.getLogger(__name__) BT_PREFIX = "BT_" CONF_REQUEST_RSSI = "request_rssi" CONF_DEVICE_ID = "device_id" DEFAULT_DEVICE_ID = -1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_TRACK_NEW): cv.boolean, vol.Optional(CONF_REQUEST_RSSI): cv.boolean, vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All( vol.Coerce(int), vol.Range(min=-1) ), } ) def is_bluetooth_device(device) -> bool: """Check whether a device is a bluetooth device by its mac.""" return device.mac and device.mac[:3].upper() == BT_PREFIX def discover_devices(device_id: int) -> List[Tuple[str, str]]: """Discover Bluetooth devices.""" result = bluetooth.discover_devices( duration=8, lookup_names=True, flush_cache=True, lookup_class=False, device_id=device_id, ) _LOGGER.debug("Bluetooth devices discovered = %d", len(result)) return result async def see_device( hass: HomeAssistantType, async_see, mac: str, device_name: str, rssi=None ) -> None: """Mark a device as seen.""" attributes = {} if rssi is not None: attributes["rssi"] = rssi await async_see( mac=f"{BT_PREFIX}{mac}", host_name=device_name, attributes=attributes, source_type=SOURCE_TYPE_BLUETOOTH, ) async def get_tracking_devices(hass: HomeAssistantType) -> 
Tuple[Set[str], Set[str]]: """ Load all known devices. We just need the devices so set consider_home and home range to 0 """ yaml_path: str = hass.config.path(YAML_DEVICES) devices = await async_load_config(yaml_path, hass, 0) bluetooth_devices = [device for device in devices if is_bluetooth_device(device)] devices_to_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if device.track } devices_to_not_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if not device.track } return devices_to_track, devices_to_not_track def lookup_name(mac: str) -> Optional[str]: """Lookup a Bluetooth device name.""" _LOGGER.debug("Scanning %s", mac) return bluetooth.lookup_name(mac, timeout=5) async def async_setup_scanner( hass: HomeAssistantType, config: dict, async_see, discovery_info=None ): """Set up the Bluetooth Scanner.""" device_id: int = config[CONF_DEVICE_ID] interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) request_rssi = config.get(CONF_REQUEST_RSSI, False) update_bluetooth_lock = asyncio.Lock() # If track new devices is true discover new devices on startup. 
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW) _LOGGER.debug("Tracking new devices is set to %s", track_new) devices_to_track, devices_to_not_track = await get_tracking_devices(hass) if not devices_to_track and not track_new: _LOGGER.debug("No Bluetooth devices to track and not tracking new devices") if request_rssi: _LOGGER.debug("Detecting RSSI for devices") async def perform_bluetooth_update(): """Discover Bluetooth devices and update status.""" _LOGGER.debug("Performing Bluetooth devices discovery and update") tasks = [] try: if track_new: devices = await hass.async_add_executor_job(discover_devices, device_id) for mac, device_name in devices: if mac not in devices_to_track and mac not in devices_to_not_track: devices_to_track.add(mac) for mac in devices_to_track: device_name = await hass.async_add_executor_job(lookup_name, mac) if device_name is None: # Could not lookup device name continue rssi = None if request_rssi: client = BluetoothRSSI(mac) rssi = await hass.async_add_executor_job(client.request_rssi) client.close() tasks.append(see_device(hass, async_see, mac, device_name, rssi)) if tasks: await asyncio.wait(tasks) except bluetooth.BluetoothError: _LOGGER.exception("Error looking up Bluetooth device") async def update_bluetooth(now=None): """Lookup Bluetooth devices and update status.""" # If an update is in progress, we don't do anything if update_bluetooth_lock.locked(): _LOGGER.debug( "Previous execution of update_bluetooth is taking longer than the scheduled update of interval %s", interval, ) return async with update_bluetooth_lock: await perform_bluetooth_update() async def handle_manual_update_bluetooth(call): """Update bluetooth devices on demand.""" await update_bluetooth() hass.async_create_task(update_bluetooth()) async_track_time_interval(hass, update_bluetooth, interval) hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth) return True
"""Test Hue init with multiple bridges.""" from aiohue.groups import Groups from aiohue.lights import Lights from aiohue.scenes import Scenes from aiohue.sensors import Sensors import pytest from homeassistant import config_entries from homeassistant.components import hue from homeassistant.components.hue import sensor_base as hue_sensor_base from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch async def setup_component(hass): """Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( await async_setup_component( hass, hue.DOMAIN, {}, ) is True ) async def test_hue_activate_scene_both_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes both bridges successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_one_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=None ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", 
{"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) mock_hue_activate_scene1.assert_called_once() mock_hue_activate_scene2.assert_called_once() async def test_hue_activate_scene_zero_responds( hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2 ): """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) await setup_bridge(hass, mock_bridge1, mock_config_entry1) await setup_bridge(hass, mock_bridge2, mock_config_entry2) with patch.object( mock_bridge1, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene1, patch.object( mock_bridge2, "hue_activate_scene", return_value=False ) as mock_hue_activate_scene2: await hass.services.async_call( "hue", "hue_activate_scene", {"group_name": "group_2", "scene_name": "my_scene"}, blocking=True, ) # both were retried assert mock_hue_activate_scene1.call_count == 2 assert mock_hue_activate_scene2.call_count == 2 async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue light platform with the provided bridge.""" mock_bridge.config_entry = config_entry hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge await hass.config_entries.async_forward_entry_setup(config_entry, "light") # To flush out the service call to update the group await hass.async_block_till_done() @pytest.fixture def mock_config_entry1(hass): """Mock a config entry.""" return create_config_entry() @pytest.fixture def mock_config_entry2(hass): """Mock a config entry.""" return create_config_entry() def create_config_entry(): """Mock a config entry.""" return config_entries.ConfigEntry( 1, hue.DOMAIN, "Mock Title", {"host": "mock-host"}, "test", config_entries.CONN_CLASS_LOCAL_POLL, system_options={}, ) @pytest.fixture def mock_bridge1(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) @pytest.fixture def mock_bridge2(hass): """Mock a Hue bridge.""" return create_mock_bridge(hass) def create_mock_bridge(hass): """Create a mock Hue bridge.""" 
bridge = Mock( hass=hass, available=True, authorized=True, allow_unreachable=False, allow_groups=False, api=Mock(), reset_jobs=[], spec=hue.HueBridge, ) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = [] async def mock_request(method, path, **kwargs): kwargs["method"] = method kwargs["path"] = path bridge.mock_requests.append(kwargs) return {} async def async_request_call(task): await task() bridge.async_request_call = async_request_call bridge.api.config.apiversion = "9.9.9" bridge.api.lights = Lights({}, mock_request) bridge.api.groups = Groups({}, mock_request) bridge.api.sensors = Sensors({}, mock_request) bridge.api.scenes = Scenes({}, mock_request) return bridge
mezz64/home-assistant
tests/components/hue/test_init_multiple_bridges.py
homeassistant/components/bluetooth_tracker/device_tracker.py